var/home/core/zuul-output/logs/kubelet.log

Dec 01 19:33:19 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 01 19:33:19 crc restorecon[4691]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 19:33:19 crc restorecon[4691]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc 
restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 19:33:19 crc 
restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 
19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 19:33:19 crc 
restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:19 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 01 19:33:20 crc restorecon[4691]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 01 19:33:20 crc kubenswrapper[4888]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.295150 4888 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298094 4888 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298112 4888 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298117 4888 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298124 4888 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298129 4888 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298134 4888 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298138 4888 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298143 4888 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298147 4888 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298154 4888 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298159 4888 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298164 4888 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298170 4888 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298175 4888 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298195 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298199 4888 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298203 4888 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298206 4888 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298211 4888 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298215 4888 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298218 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298232 4888 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298235 4888 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298239 4888 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298243 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298246 4888 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298250 4888 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298254 4888 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298258 4888 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298262 4888 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298266 4888 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298269 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298273 4888 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298276 4888 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298280 4888 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298284 4888 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 19:33:20 crc kubenswrapper[4888]: 
W1201 19:33:20.298288 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298293 4888 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298299 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298303 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298307 4888 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298312 4888 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298317 4888 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298321 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298325 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298329 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298332 4888 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298336 4888 feature_gate.go:330] unrecognized feature gate: Example Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298340 4888 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298344 4888 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298348 4888 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298352 4888 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298356 4888 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298360 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298363 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298367 4888 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298371 4888 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298375 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298378 4888 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298382 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298385 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 
19:33:20.298389 4888 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298392 4888 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298395 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298399 4888 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298402 4888 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298405 4888 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298409 4888 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298413 4888 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298422 4888 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.298427 4888 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298647 4888 flags.go:64] FLAG: --address="0.0.0.0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298658 4888 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298668 4888 flags.go:64] FLAG: --anonymous-auth="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298675 4888 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298681 4888 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298686 4888 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298692 4888 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298698 4888 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298702 4888 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298706 4888 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298711 4888 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298716 4888 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298720 4888 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298725 4888 flags.go:64] FLAG: --cgroup-root="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298731 4888 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298735 4888 flags.go:64] FLAG: --client-ca-file="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298739 4888 flags.go:64] FLAG: --cloud-config="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298743 4888 flags.go:64] FLAG: --cloud-provider="" Dec 01 19:33:20 crc 
kubenswrapper[4888]: I1201 19:33:20.298747 4888 flags.go:64] FLAG: --cluster-dns="[]" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298752 4888 flags.go:64] FLAG: --cluster-domain="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298756 4888 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298760 4888 flags.go:64] FLAG: --config-dir="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298764 4888 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298768 4888 flags.go:64] FLAG: --container-log-max-files="5" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298774 4888 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298778 4888 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298782 4888 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298786 4888 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298791 4888 flags.go:64] FLAG: --contention-profiling="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298795 4888 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298799 4888 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298804 4888 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298808 4888 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298813 4888 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298818 4888 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298822 4888 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298826 4888 flags.go:64] FLAG: --enable-load-reader="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298830 4888 flags.go:64] FLAG: --enable-server="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298834 4888 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298844 4888 flags.go:64] FLAG: --event-burst="100" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298848 4888 flags.go:64] FLAG: --event-qps="50" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298853 4888 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298857 4888 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298861 4888 flags.go:64] FLAG: --eviction-hard="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298866 4888 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298870 4888 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298874 4888 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298879 4888 flags.go:64] FLAG: --eviction-soft="" Dec 01 
19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298882 4888 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298886 4888 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298895 4888 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298900 4888 flags.go:64] FLAG: --experimental-mounter-path="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298903 4888 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298907 4888 flags.go:64] FLAG: --fail-swap-on="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298911 4888 flags.go:64] FLAG: --feature-gates="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298921 4888 flags.go:64] FLAG: --file-check-frequency="20s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298925 4888 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298929 4888 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298933 4888 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298938 4888 flags.go:64] FLAG: --healthz-port="10248" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298942 4888 flags.go:64] FLAG: --help="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298946 4888 flags.go:64] FLAG: --hostname-override="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298949 4888 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298953 4888 flags.go:64] FLAG: --http-check-frequency="20s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298957 4888 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298961 4888 flags.go:64] FLAG: --image-credential-provider-config="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298965 4888 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298970 4888 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298974 4888 flags.go:64] FLAG: --image-service-endpoint="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298978 4888 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298982 4888 flags.go:64] FLAG: --kube-api-burst="100" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298986 4888 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298990 4888 flags.go:64] FLAG: --kube-api-qps="50" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298994 4888 flags.go:64] FLAG: --kube-reserved="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.298998 4888 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299002 4888 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299006 4888 flags.go:64] FLAG: --kubelet-cgroups="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299010 4888 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 01 19:33:20 crc 
kubenswrapper[4888]: I1201 19:33:20.299014 4888 flags.go:64] FLAG: --lock-file="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299018 4888 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299022 4888 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299026 4888 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299032 4888 flags.go:64] FLAG: --log-json-split-stream="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299036 4888 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299040 4888 flags.go:64] FLAG: --log-text-split-stream="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299044 4888 flags.go:64] FLAG: --logging-format="text" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299048 4888 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299053 4888 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299059 4888 flags.go:64] FLAG: --manifest-url="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299063 4888 flags.go:64] FLAG: --manifest-url-header="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299068 4888 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299073 4888 flags.go:64] FLAG: --max-open-files="1000000" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299078 4888 flags.go:64] FLAG: --max-pods="110" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299082 4888 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299086 4888 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299090 4888 flags.go:64] FLAG: --memory-manager-policy="None" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299094 4888 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299098 4888 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299104 4888 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299109 4888 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299118 4888 flags.go:64] FLAG: --node-status-max-images="50" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299122 4888 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299126 4888 flags.go:64] FLAG: --oom-score-adj="-999" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299130 4888 flags.go:64] FLAG: --pod-cidr="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299134 4888 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299141 4888 flags.go:64] FLAG: --pod-manifest-path="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299145 4888 flags.go:64] FLAG: 
--pod-max-pids="-1" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299149 4888 flags.go:64] FLAG: --pods-per-core="0" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299154 4888 flags.go:64] FLAG: --port="10250" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299158 4888 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299162 4888 flags.go:64] FLAG: --provider-id="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299166 4888 flags.go:64] FLAG: --qos-reserved="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299170 4888 flags.go:64] FLAG: --read-only-port="10255" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299174 4888 flags.go:64] FLAG: --register-node="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299178 4888 flags.go:64] FLAG: --register-schedulable="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299201 4888 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299209 4888 flags.go:64] FLAG: --registry-burst="10" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299213 4888 flags.go:64] FLAG: --registry-qps="5" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299217 4888 flags.go:64] FLAG: --reserved-cpus="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299221 4888 flags.go:64] FLAG: --reserved-memory="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299227 4888 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299232 4888 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299237 4888 flags.go:64] FLAG: --rotate-certificates="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299241 4888 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299245 4888 flags.go:64] FLAG: --runonce="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299250 4888 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299254 4888 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299258 4888 flags.go:64] FLAG: --seccomp-default="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299262 4888 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299266 4888 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299270 4888 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299275 4888 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299280 4888 flags.go:64] FLAG: --storage-driver-password="root" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299284 4888 flags.go:64] FLAG: --storage-driver-secure="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299288 4888 flags.go:64] FLAG: --storage-driver-table="stats" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299292 4888 flags.go:64] FLAG: --storage-driver-user="root" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299296 4888 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 01 19:33:20 crc 
kubenswrapper[4888]: I1201 19:33:20.299300 4888 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299305 4888 flags.go:64] FLAG: --system-cgroups="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299309 4888 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299315 4888 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299319 4888 flags.go:64] FLAG: --tls-cert-file="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299323 4888 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299331 4888 flags.go:64] FLAG: --tls-min-version="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299335 4888 flags.go:64] FLAG: --tls-private-key-file="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299339 4888 flags.go:64] FLAG: --topology-manager-policy="none" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299344 4888 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299348 4888 flags.go:64] FLAG: --topology-manager-scope="container" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299353 4888 flags.go:64] FLAG: --v="2" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299360 4888 flags.go:64] FLAG: --version="false" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299366 4888 flags.go:64] FLAG: --vmodule="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299371 4888 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299377 4888 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299490 4888 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299495 4888 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299499 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299503 4888 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299507 4888 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299511 4888 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299514 4888 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299518 4888 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299522 4888 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299525 4888 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299529 4888 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299533 4888 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299536 4888 
feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299540 4888 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299543 4888 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299547 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299550 4888 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299554 4888 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299557 4888 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299561 4888 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299564 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299568 4888 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299571 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299575 4888 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299578 4888 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299582 4888 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299585 4888 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299588 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299592 4888 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299595 4888 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299599 4888 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299603 4888 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299606 4888 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299610 4888 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299614 4888 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299617 4888 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299622 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299625 4888 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: 
W1201 19:33:20.299630 4888 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299634 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299639 4888 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299644 4888 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299648 4888 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299653 4888 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299656 4888 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299660 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299664 4888 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299667 4888 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299671 4888 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299674 4888 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299678 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299681 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299685 4888 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299688 4888 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299692 4888 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299696 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299699 4888 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299703 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299707 4888 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
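Each feature_gate.go:330 warning above is one gate name this kubelet build does not register; the list appears to be the cluster-wide OpenShift gate set handed to every component, and unknown names are warned about rather than treated as fatal. A toy re-implementation of that tolerant parse, in plain Go with a made-up registry (not the kubelet's actual code path):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// known stands in for the component's registered gates; real registries
// are far larger. Names outside it only produce a warning, which is why
// the OpenShift-specific gates above show up as W-level lines.
var known = map[string]bool{
	"CloudDualStackNodeIPs": true,
	"KMSv1":                 true,
}

// parseGates reads a "Name=bool,Name=bool" spec into out, warning on
// unknown names instead of failing.
func parseGates(spec string, out map[string]bool) {
	for _, kv := range strings.Split(spec, ",") {
		name, val, _ := strings.Cut(kv, "=")
		enabled, err := strconv.ParseBool(val)
		if err != nil {
			fmt.Printf("E: invalid value %q for feature gate %s\n", val, name)
			continue
		}
		if !known[name] {
			fmt.Printf("W: unrecognized feature gate: %s\n", name)
			continue
		}
		out[name] = enabled
	}
}

func main() {
	gates := map[string]bool{}
	parseGates("CloudDualStackNodeIPs=true,GatewayAPI=true,KMSv1=true", gates)
	fmt.Println("feature gates:", gates)
}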
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299711 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299716 4888 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299720 4888 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299724 4888 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299728 4888 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299732 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299735 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299739 4888 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299744 4888 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299748 4888 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299751 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.299754 4888 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.299761 4888 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.308326 4888 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.308351 4888 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308406 4888 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308414 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308419 4888 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308424 4888 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308428 4888 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308433 4888 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308438 4888 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308444 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308448 4888 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308453 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308457 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308460 4888 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308464 4888 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308468 4888 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308471 4888 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308475 4888 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308479 4888 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308482 4888 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308486 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308490 4888 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308494 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308498 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308502 4888 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308505 4888 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308509 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308513 4888 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308517 4888 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308521 4888 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308526 4888 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308531 4888 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308535 4888 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308539 4888 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308543 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308547 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308552 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308556 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308559 4888 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308563 4888 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308567 4888 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308572 4888 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308577 4888 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308581 4888 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308584 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308588 4888 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308593 4888 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308597 4888 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308601 4888 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308605 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308609 4888 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308613 4888 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308616 4888 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308620 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308623 4888 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308627 4888 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308631 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308635 4888 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308639 4888 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308642 4888 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308646 4888 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308650 4888 feature_gate.go:330] unrecognized feature gate: Example Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308653 4888 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308657 4888 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308660 4888 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308665 4888 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308670 4888 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
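The feature_gate.go:351 and :353 lines mark a second case: KMSv1, ValidatingAdmissionPolicy, CloudDualStackNodeIPs and DisableKubeletCloudCredentialProviders are recognized but deprecated or already GA, so setting them succeeds with a removal warning. Kubernetes components manage gates through k8s.io/component-base/featuregate; a minimal sketch with invented gate names (assuming that module's API; note that upstream Set rejects unknown names with an error, where this kubelet build evidently downgrades them to the warnings seen above):

package main

import (
	"fmt"

	"k8s.io/component-base/featuregate"
)

func main() {
	fg := featuregate.NewFeatureGate()
	// Register two illustrative gates (names invented for this sketch).
	if err := fg.Add(map[featuregate.Feature]featuregate.FeatureSpec{
		"ExampleGA":    {Default: true, PreRelease: featuregate.GA},
		"ExampleAlpha": {Default: false, PreRelease: featuregate.Alpha},
	}); err != nil {
		panic(err)
	}
	// Setting the GA gate works but logs the same kind of
	// "Setting GA feature gate ... It will be removed in a future
	// release." warning seen in this log.
	if err := fg.Set("ExampleGA=true,ExampleAlpha=true"); err != nil {
		panic(err)
	}
	fmt.Println("ExampleAlpha enabled:", fg.Enabled("ExampleAlpha"))
}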
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308674 4888 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308679 4888 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308683 4888 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308687 4888 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308691 4888 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308695 4888 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.308702 4888 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308805 4888 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308813 4888 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308817 4888 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308821 4888 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308825 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308829 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308832 4888 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308835 4888 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308841 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308846 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308851 4888 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308855 4888 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308859 4888 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308864 4888 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308869 4888 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308873 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308877 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308881 4888 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308885 4888 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308889 4888 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308892 4888 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308896 4888 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308899 4888 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308903 4888 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308907 4888 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308912 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308916 4888 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308919 4888 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308924 4888 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308929 4888 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308934 4888 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308937 4888 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308941 4888 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308945 4888 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308950 4888 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308954 4888 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308957 4888 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308961 4888 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308964 4888 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308968 4888 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308971 4888 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308975 4888 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308979 4888 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308984 4888 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308988 4888 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308992 4888 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.308996 4888 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309000 4888 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309004 4888 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309008 4888 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309012 4888 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309017 4888 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309020 4888 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309024 4888 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309028 4888 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309031 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309035 4888 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309039 4888 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309042 4888 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309046 4888 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309049 4888 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309053 4888 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309056 4888 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309060 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309066 4888 feature_gate.go:330] unrecognized feature gate: Example Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309071 4888 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
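Note how the whole gate list has now been parsed four times within roughly 10 ms (the .298xxx, .299xxx, .308xxx and .309xxx blocks above are near-identical), once per place the kubelet constructs a feature-gate set during startup. When reviewing a log like this one, a small filter turns that repetition into counts; a sketch of such a reader-side utility (hypothetical, not part of the kubelet):

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

func main() {
	// Count every "unrecognized feature gate: X" occurrence on stdin.
	re := regexp.MustCompile(`unrecognized feature gate: (\S+)`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines can be long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	// Print one line per gate, sorted by name.
	names := make([]string, 0, len(counts))
	for n := range counts {
		names = append(names, n)
	}
	sort.Strings(names)
	for _, n := range names {
		fmt.Printf("%4d  %s\n", counts[n], n)
	}
}

Fed this file (or journalctl -u kubelet) on stdin, it prints each gate once with its repeat count instead of four full warning blocks.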
Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309075 4888 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309080 4888 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309084 4888 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309087 4888 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.309092 4888 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.309097 4888 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.309406 4888 server.go:940] "Client rotation is on, will bootstrap in background" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.311468 4888 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.311535 4888 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.311996 4888 server.go:997] "Starting client certificate rotation" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.312012 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.312291 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-25 09:18:25.361496829 +0000 UTC Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.312340 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.325709 4888 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.327521 4888 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.330777 4888 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.339029 4888 log.go:25] "Validated CRI v1 runtime API" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.355521 4888 log.go:25] "Validated CRI v1 image API" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.357088 4888 server.go:1437] 
"Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.360733 4888 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-01-19-28-33-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.360768 4888 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.374373 4888 manager.go:217] Machine: {Timestamp:2025-12-01 19:33:20.372701725 +0000 UTC m=+0.243731649 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:c809babe-48a2-4ca7-84bd-33ee12868d2c BootID:66ac84ba-9185-46f7-8b54-e8d4aaa37c1c Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:86:e2:20 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:86:e2:20 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e3:d8:a2 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:d2:71:37 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:64:92:0d Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:11:85:f9 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:2e:b8:56:3f:f6 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6a:43:b4:07:e3:69 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] 
SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.374608 4888 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.374796 4888 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375341 4888 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375525 4888 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375562 4888 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375776 4888 topology_manager.go:138] "Creating topology manager with none policy" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375787 4888 container_manager_linux.go:303] "Creating device plugin manager" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.375944 4888 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.376107 4888 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.376577 4888 state_mem.go:36] "Initialized new in-memory state store" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.376661 4888 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.377515 4888 kubelet.go:418] "Attempting to sync node with API server" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.377539 4888 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.377579 4888 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.377594 4888 kubelet.go:324] "Adding apiserver pod source" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.377606 4888 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.379725 4888 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.380722 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.380822 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.380818 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.381095 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.381321 4888 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.384103 4888 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.384951 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.384986 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385000 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385023 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385042 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385054 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385067 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385086 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385100 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385113 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385140 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385153 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.385433 4888 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.386566 4888 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.386753 4888 server.go:1280] "Started kubelet" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.387121 4888 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.387276 4888 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.388277 4888 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.388934 4888 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.388980 4888 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.389047 4888 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.389283 4888 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.389291 4888 
desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.389225 4888 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.389060 4888 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 02:46:42.899576081 +0000 UTC Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.390072 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:20 crc systemd[1]: Started Kubernetes Kubelet. Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.390204 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.391797 4888 server.go:460] "Adding debug handlers to kubelet server" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.392789 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="200ms" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.393096 4888 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.163:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d2e5bb36a796a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 19:33:20.386103658 +0000 UTC m=+0.257133602,LastTimestamp:2025-12-01 19:33:20.386103658 +0000 UTC m=+0.257133602,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.395877 4888 factory.go:55] Registering systemd factory Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.395909 4888 factory.go:221] Registration of the systemd container factory successfully Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.396281 4888 factory.go:153] Registering CRI-O factory Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.396316 4888 factory.go:221] Registration of the crio container factory successfully Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.396421 4888 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.396466 4888 factory.go:103] Registering Raw factory Dec 01 19:33:20 crc kubenswrapper[4888]: 
I1201 19:33:20.396491 4888 manager.go:1196] Started watching for new ooms in manager Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.397301 4888 manager.go:319] Starting recovery of all containers Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399389 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399492 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399513 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399529 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399549 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399561 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399578 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399591 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399612 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399624 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399637 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399658 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399671 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399701 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399712 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399731 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399742 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399757 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399767 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399789 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399844 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399860 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399869 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399882 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399892 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399908 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399920 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399935 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399947 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399956 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399965 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399983 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.399993 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400002 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400015 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400025 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400040 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400049 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400058 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400071 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400083 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400098 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400109 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400121 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400134 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400142 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400154 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400164 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400173 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400201 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400213 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400225 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400239 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400254 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400267 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400279 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400293 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400303 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400880 4888 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400914 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400929 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400940 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400955 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400971 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.400985 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 
19:33:20.400995 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401008 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401024 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401033 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401046 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401101 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401112 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401127 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401143 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401152 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401169 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401200 4888 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401214 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401225 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401236 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401274 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401284 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401301 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401312 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401323 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401341 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401351 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 
19:33:20.401369 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401379 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401389 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401405 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401419 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401435 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401446 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401540 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401559 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401569 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401581 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401599 4888 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401851 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401879 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401893 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401908 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401927 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.401940 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402020 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402050 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402068 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402090 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402112 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402128 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402148 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402270 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402289 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402314 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402331 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402349 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402363 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402376 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402394 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402409 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402427 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402439 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402454 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402471 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402484 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402504 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402520 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402535 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402555 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402569 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402588 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402606 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402618 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402639 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402654 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402672 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402685 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402699 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402717 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402732 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402745 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402760 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402775 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402791 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402804 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402822 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402840 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.402857 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403257 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403317 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403409 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403469 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403484 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403509 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403528 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403562 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403577 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403593 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403652 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403666 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403682 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403697 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403711 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403731 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403756 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403773 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403786 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403799 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403818 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403833 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403848 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403860 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403873 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403888 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.403902 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.404619 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.404655 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.404672 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.406945 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.406993 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407011 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407028 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407045 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407064 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407081 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407098 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407116 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407133 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407148 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407164 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407207 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407224 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407241 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407258 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407274 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407291 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407308 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407323 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407341 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407357 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407372 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407390 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407405 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407421 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407437 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407454 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407469 4888 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407485 4888 reconstruct.go:97] "Volume reconstruction finished" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.407498 4888 reconciler.go:26] "Reconciler: start to sync state" Dec 01 19:33:20 crc kubenswrapper[4888]: 
I1201 19:33:20.429451 4888 manager.go:324] Recovery completed Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.442943 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.444999 4888 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.445160 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.445260 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.445279 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.447205 4888 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.447230 4888 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.447258 4888 state_mem.go:36] "Initialized new in-memory state store" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.449841 4888 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.449923 4888 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.449962 4888 kubelet.go:2335] "Starting kubelet main sync loop" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.450020 4888 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.451238 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.451352 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.457065 4888 policy_none.go:49] "None policy: Start" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.458004 4888 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.458115 4888 state_mem.go:35] "Initializing new in-memory state store" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.489486 4888 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.506371 4888 manager.go:334] "Starting Device Plugin manager" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.506427 4888 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.506444 4888 server.go:79] 
"Starting device plugin registration server" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.506898 4888 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.506917 4888 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.507150 4888 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.507237 4888 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.507245 4888 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.514915 4888 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.551024 4888 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.551161 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.552892 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.553050 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.553217 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.553528 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.553775 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.553826 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.554951 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555007 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555113 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555136 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555148 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555238 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555574 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.555723 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556462 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556570 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556658 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556826 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556865 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.556882 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.557100 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.557141 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.557213 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558091 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558122 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558615 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.558716 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.559550 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.559847 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.559916 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562418 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562574 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562622 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.562644 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.563917 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.563966 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.564667 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.564692 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.564704 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.593846 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="400ms" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.607020 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.608176 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.608325 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.608389 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.608465 4888 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.609148 4888 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.163:6443: connect: connection refused" node="crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611424 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611489 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611539 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611575 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611607 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611649 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611690 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611723 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611763 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611852 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611904 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.611993 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.612113 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.612232 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.612332 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.713818 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.713931 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.713974 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714003 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714034 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714049 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714101 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714062 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714166 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714244 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714241 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714278 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714298 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714311 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714341 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714344 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714385 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714418 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714447 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714453 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714481 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714505 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714546 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714387 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714596 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714630 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714422 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod 
\"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714680 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714717 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.714751 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.809303 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.811227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.811264 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.811274 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.811297 4888 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.812274 4888 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.163:6443: connect: connection refused" node="crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.890533 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.919630 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.928416 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.941781 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.946954 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-c602bc845cba318a952a718304aedb96ee5b5805a9026a01545153e3c3b8a182 WatchSource:0}: Error finding container c602bc845cba318a952a718304aedb96ee5b5805a9026a01545153e3c3b8a182: Status 404 returned error can't find the container with id c602bc845cba318a952a718304aedb96ee5b5805a9026a01545153e3c3b8a182 Dec 01 19:33:20 crc kubenswrapper[4888]: I1201 19:33:20.947829 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.966621 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-121706cedaf329f50f89966a4713e6eec7bc7cb869b25cbf0de0973134d6b49f WatchSource:0}: Error finding container 121706cedaf329f50f89966a4713e6eec7bc7cb869b25cbf0de0973134d6b49f: Status 404 returned error can't find the container with id 121706cedaf329f50f89966a4713e6eec7bc7cb869b25cbf0de0973134d6b49f Dec 01 19:33:20 crc kubenswrapper[4888]: W1201 19:33:20.973466 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-4a6a2f6b3fa698a508802e79202b5f1f947aae7f65f7cd9b337bcadc10d420cf WatchSource:0}: Error finding container 4a6a2f6b3fa698a508802e79202b5f1f947aae7f65f7cd9b337bcadc10d420cf: Status 404 returned error can't find the container with id 4a6a2f6b3fa698a508802e79202b5f1f947aae7f65f7cd9b337bcadc10d420cf Dec 01 19:33:20 crc kubenswrapper[4888]: E1201 19:33:20.994845 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="800ms" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.212650 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.214068 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.214109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.214123 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.214206 4888 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.214628 4888 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.163:6443: connect: connection refused" node="crc" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.388005 4888 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.163:6443: connect: 
connection refused Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.390080 4888 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 15:44:53.581904989 +0000 UTC Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.390180 4888 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 620h11m32.191733582s for next certificate rotation Dec 01 19:33:21 crc kubenswrapper[4888]: W1201 19:33:21.398878 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.399002 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:21 crc kubenswrapper[4888]: W1201 19:33:21.437474 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.437551 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.455622 4888 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="783f431aa2e798cfad1af57cba2486a0542db54a26c080054d0171150eccc4ea" exitCode=0 Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.455719 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"783f431aa2e798cfad1af57cba2486a0542db54a26c080054d0171150eccc4ea"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.455818 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c602bc845cba318a952a718304aedb96ee5b5805a9026a01545153e3c3b8a182"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.455891 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457085 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457158 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457645 4888 
generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609" exitCode=0 Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457695 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457711 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"345eeca4e3f2e82a30d36041af83172e33e8435c11151841e0e6ba5effcb1410"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.457762 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.458655 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.458678 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.458686 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.460130 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.460161 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e27ef54f09dce0c3f10f3718546e6e073fccdf5ec1b9deb55eae809bc5e6fa3f"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.461705 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245" exitCode=0 Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.461752 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.461767 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4a6a2f6b3fa698a508802e79202b5f1f947aae7f65f7cd9b337bcadc10d420cf"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.461840 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.462539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.462573 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.462589 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.463735 4888 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38" exitCode=0 Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.463773 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.463795 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"121706cedaf329f50f89966a4713e6eec7bc7cb869b25cbf0de0973134d6b49f"} Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.463893 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.464598 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.464633 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.464646 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.466758 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.467762 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.467792 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:21 crc kubenswrapper[4888]: I1201 19:33:21.467802 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:21 crc kubenswrapper[4888]: W1201 19:33:21.752201 4888 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.752361 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.796374 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="1.6s" Dec 01 19:33:21 crc kubenswrapper[4888]: W1201 19:33:21.860845 4888 
reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:33:21 crc kubenswrapper[4888]: E1201 19:33:21.860932 4888 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.163:6443: connect: connection refused" logger="UnhandledError" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.015288 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.016856 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.016927 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.016973 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.017012 4888 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.348437 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.467903 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.467966 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.467980 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.468099 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469099 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469129 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469140 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469820 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469882 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469897 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.469911 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.470858 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.470896 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.470907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.473277 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.473314 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.473324 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.473332 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.474896 4888 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f" exitCode=0 Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.474943 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.475176 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.476030 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.476060 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.476071 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.477587 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e47c76ce206a2e9bafeb6931c5b1e7eab2de9f751b23a4fb313d7e80bce2522d"} Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.477654 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.478296 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.478323 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:22 crc kubenswrapper[4888]: I1201 19:33:22.478335 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.425725 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.487411 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976"} Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.488261 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.489348 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.489379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.489388 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.490717 4888 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857" exitCode=0 Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.490785 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857"} Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.490849 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.490904 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:23 crc 
kubenswrapper[4888]: I1201 19:33:23.491924 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.491974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.491991 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.492397 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.492573 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:23 crc kubenswrapper[4888]: I1201 19:33:23.492700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496557 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"029c98a5fa0e3afc0bba50293a88bb7978a002bff6c87cdb9cf86c0165a95e09"} Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496601 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496611 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bab3cb8d2dec74792ffcf0d50622194a297e1bec87ebbf8d2ad4ff3ad2b460a2"} Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496625 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4b6229a03c2ac283713c2bbdb42ae3486a742f13a0d8f36221c926b3a1f6839b"} Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496637 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a082e7eb03d0a6e1a89af241ad1bdec5406448a5a5c008014b14b589d6e0b381"} Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496629 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.496615 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.497564 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.497583 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.497590 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.497595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.497615 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:24 crc 
kubenswrapper[4888]: I1201 19:33:24.497624 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:24 crc kubenswrapper[4888]: I1201 19:33:24.774734 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.503727 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2c3f8a6d3dfbe9fe4c6304608f75951ba4499463f344d4ce2d76e00ebc544082"} Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.503771 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.503815 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.503818 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.504855 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.504885 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.504895 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.504907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.505263 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:25 crc kubenswrapper[4888]: I1201 19:33:25.505304 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:26 crc kubenswrapper[4888]: I1201 19:33:26.380148 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 01 19:33:26 crc kubenswrapper[4888]: I1201 19:33:26.509573 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:26 crc kubenswrapper[4888]: I1201 19:33:26.511412 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:26 crc kubenswrapper[4888]: I1201 19:33:26.511480 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:26 crc kubenswrapper[4888]: I1201 19:33:26.511503 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.247262 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.247741 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.247814 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.249979 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.250066 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.250090 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.512984 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.514778 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.514829 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:27 crc kubenswrapper[4888]: I1201 19:33:27.514838 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.851706 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.852377 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.854239 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.854323 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.854344 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.897012 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.897249 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.898589 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.898627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.898640 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:28 crc kubenswrapper[4888]: I1201 19:33:28.904329 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:29 crc kubenswrapper[4888]: I1201 19:33:29.519299 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:29 crc kubenswrapper[4888]: I1201 19:33:29.520445 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:29 crc kubenswrapper[4888]: I1201 19:33:29.520520 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:29 crc kubenswrapper[4888]: I1201 19:33:29.520545 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.343497 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:30 crc kubenswrapper[4888]: E1201 19:33:30.515165 4888 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.521711 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.522793 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.522865 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.522889 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.732763 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.733027 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.734983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.735041 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.735060 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:30 crc kubenswrapper[4888]: I1201 19:33:30.985060 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:31 crc kubenswrapper[4888]: I1201 19:33:31.529781 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:31 crc kubenswrapper[4888]: I1201 19:33:31.532174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:31 crc kubenswrapper[4888]: I1201 19:33:31.532309 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:31 crc kubenswrapper[4888]: I1201 19:33:31.532339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:31 crc kubenswrapper[4888]: I1201 19:33:31.536943 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:32 crc kubenswrapper[4888]: E1201 19:33:32.019966 4888 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 01 19:33:32 crc kubenswrapper[4888]: E1201 
19:33:32.354026 4888 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 01 19:33:32 crc kubenswrapper[4888]: I1201 19:33:32.388988 4888 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 01 19:33:32 crc kubenswrapper[4888]: I1201 19:33:32.532357 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:32 crc kubenswrapper[4888]: I1201 19:33:32.533213 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:32 crc kubenswrapper[4888]: I1201 19:33:32.533281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:32 crc kubenswrapper[4888]: I1201 19:33:32.533291 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.344503 4888 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.344586 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.350504 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.350542 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.369390 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.369462 4888 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.620370 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.621663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.621698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.621726 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:33 crc kubenswrapper[4888]: I1201 19:33:33.621791 4888 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.003649 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.003886 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.005611 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.005700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.005720 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.075437 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.535754 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.537088 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.537153 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.537172 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.548285 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.783380 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.783576 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.785066 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.785106 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.785116 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:34 crc kubenswrapper[4888]: I1201 19:33:34.787539 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.537851 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.537855 4888 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.538711 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.538749 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.538767 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.539023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.539754 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:35 crc kubenswrapper[4888]: I1201 19:33:35.539818 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:36 crc kubenswrapper[4888]: I1201 19:33:36.534858 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 01 19:33:36 crc kubenswrapper[4888]: I1201 19:33:36.554827 4888 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.369875 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.372460 4888 trace.go:236] Trace[1469304347]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 19:33:24.830) (total time: 13541ms): Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[1469304347]: ---"Objects listed" error: 13541ms (19:33:38.372) Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[1469304347]: [13.541605957s] [13.541605957s] END Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.372502 4888 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.372465 4888 trace.go:236] Trace[1529393661]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 19:33:23.516) (total time: 14856ms): Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[1529393661]: ---"Objects listed" error: 14856ms (19:33:38.372) Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[1529393661]: [14.856162241s] [14.856162241s] END Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.372592 4888 reflector.go:368] Caches populated for 
*v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.374363 4888 trace.go:236] Trace[872708319]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 19:33:24.563) (total time: 13810ms): Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[872708319]: ---"Objects listed" error: 13810ms (19:33:38.374) Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[872708319]: [13.810870115s] [13.810870115s] END Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.374398 4888 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.376455 4888 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.377696 4888 trace.go:236] Trace[843376039]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 19:33:24.592) (total time: 13785ms): Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[843376039]: ---"Objects listed" error: 13785ms (19:33:38.377) Dec 01 19:33:38 crc kubenswrapper[4888]: Trace[843376039]: [13.785333617s] [13.785333617s] END Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.377737 4888 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.389000 4888 apiserver.go:52] "Watching apiserver" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.393957 4888 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.394398 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.394956 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.395120 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.394969 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.395172 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.395160 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.395615 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.395707 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.395837 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.395939 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.398375 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.401991 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.402209 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.402377 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.402610 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.403373 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.403487 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.403570 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.403939 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.447728 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.460291 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.471677 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.480657 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.489706 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.489996 4888 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.497914 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.507649 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578219 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578265 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578283 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578301 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578317 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578345 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578382 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578397 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578413 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578428 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578444 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578458 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578473 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578489 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578504 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578519 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578544 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578562 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578578 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578580 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578595 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578659 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578687 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578702 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578718 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578733 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578749 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 
19:33:38.578762 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578778 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578794 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578808 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578822 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578839 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578852 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578867 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578881 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578897 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc 
kubenswrapper[4888]: I1201 19:33:38.578912 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578926 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578967 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578982 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579111 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579133 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579155 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579171 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579236 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579258 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579273 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579289 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579305 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579322 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579337 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578613 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579361 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.578613 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579128 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579262 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579410 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579319 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579340 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579505 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579564 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579582 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579594 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579696 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579735 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579749 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579795 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579907 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579974 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580020 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580043 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580122 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580138 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580241 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.580258 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:33:39.080240297 +0000 UTC m=+18.951270211 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580300 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580354 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580435 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580478 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580521 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580515 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580640 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580806 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580833 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580870 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580882 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.580991 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581112 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581134 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.579353 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581232 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581269 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581291 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581314 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581339 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581696 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581732 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581756 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581775 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581794 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581825 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581844 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581864 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581885 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581905 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581927 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581947 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581969 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581151 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581991 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582011 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582031 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582052 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582071 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582097 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582121 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582144 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582164 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: 
I1201 19:33:38.583638 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583676 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583699 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583730 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583752 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583779 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583803 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583827 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583849 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583874 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583898 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583921 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583944 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583967 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583989 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584012 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584558 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584586 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584603 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584617 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584630 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584645 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584663 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584678 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584692 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584709 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584727 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584747 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584809 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584831 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584851 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584871 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584899 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584920 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584943 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584962 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584981 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585003 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585024 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585044 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585064 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585087 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585112 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585132 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585153 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585172 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585212 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585237 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585260 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585576 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585606 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585629 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585651 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585674 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585702 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585723 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585744 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585766 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585788 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585805 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585821 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585839 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585905 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585981 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586005 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586027 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586042 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586058 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586072 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586089 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586106 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586127 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586147 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586168 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581176 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586207 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.581975 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582805 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.582971 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583082 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583202 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583298 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583384 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583402 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583489 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583486 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583788 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.583796 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584681 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584884 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.584904 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585253 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585271 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585421 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585507 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585607 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585743 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585779 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.585984 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586136 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586474 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586543 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586589 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587092 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.586230 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587225 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587268 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587308 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587817 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: 
"5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.587981 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588035 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588056 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588082 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588109 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588131 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588152 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588175 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588228 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588250 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588254 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588276 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588303 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588300 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588328 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588328 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588345 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588357 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588393 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588480 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588419 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588633 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588660 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588684 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588703 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588741 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588762 4888 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588787 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588812 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588933 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588934 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588960 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588966 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.588984 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589005 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589098 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589153 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589310 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589374 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589417 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589495 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589645 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589692 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589755 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589813 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod 
\"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590002 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590066 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590092 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590118 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590157 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590261 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590297 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590339 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590386 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590444 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590467 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590514 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590540 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590563 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590885 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590926 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590938 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590949 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 
19:33:38.590966 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590976 4888 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590987 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591020 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591034 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591049 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591061 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591087 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591107 4888 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591118 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591130 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591143 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591171 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591215 4888 
reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591233 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591249 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591261 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591290 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591306 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591327 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591339 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591366 4888 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591376 4888 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591395 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591410 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591426 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591466 
4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591498 4888 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589118 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591693 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589257 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589287 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589478 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589706 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589795 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589868 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590420 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590517 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.590995 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591090 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591196 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591449 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591612 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591882 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591938 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.592042 4888 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.589240 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.592293 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.592324 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.592451 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.592989 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593105 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593266 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593241 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593310 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593528 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.594108 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.593684 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.594321 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.594465 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.594513 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.594173 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.595305 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.595078 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.595152 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.594400 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.595799 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.596209 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.596766 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.596823 4888 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.597333 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.597500 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.591392 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598227 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598165 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598260 4888 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598327 4888 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598394 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598413 4888 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598435 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598451 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598461 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.598578 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:39.098544136 +0000 UTC m=+18.969574050 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.598640 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:39.098625269 +0000 UTC m=+18.969655183 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598470 4888 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598685 4888 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598708 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598724 4888 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598884 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598908 4888 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598928 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598957 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598969 4888 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598982 4888 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598994 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599008 4888 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599019 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599031 4888 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599042 4888 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599056 4888 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599067 4888 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599078 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599091 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599101 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599113 4888 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599123 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599139 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599154 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599168 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599202 4888 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599220 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599231 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599242 4888 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599255 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599268 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599278 4888 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599289 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599302 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599313 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599323 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599334 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599347 4888 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598771 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598906 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.598942 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599163 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599363 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599513 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599683 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599962 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.600236 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.600427 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.599766 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.600576 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.600694 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.600746 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.601065 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.602432 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.602855 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.602910 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.603528 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.603869 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.604150 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.606280 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.606569 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.606589 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.606878 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.606910 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607173 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607247 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607318 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607488 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607575 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607684 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.607801 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.608787 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.608809 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.608899 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.608969 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.609121 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.609141 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.609165 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.609257 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:39.10923487 +0000 UTC m=+18.980264784 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.609490 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.609582 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.609924 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.610088 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.610249 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.611137 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.611169 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.611202 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.611256 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:39.111239837 +0000 UTC m=+18.982269861 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.611378 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.612395 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.612458 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.612990 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.614103 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.614304 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.614818 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.614835 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615005 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615298 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615349 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615357 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615523 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.615958 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.616165 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.617855 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.618336 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.618553 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.618630 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.618886 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.620064 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.621082 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.622901 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.623233 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.623958 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.624069 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.624325 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.624559 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.627938 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.629675 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.631331 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.631592 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.631645 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.637512 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.638561 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.640932 4888 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.641870 4888 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.641736 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.644442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.644474 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.644482 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.644496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.644506 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.652228 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.654075 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.664406 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.664803 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.669614 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.669650 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.669660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.669675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.669685 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.677866 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.681396 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.681432 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.681447 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.681464 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.681475 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.690392 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.693012 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.693169 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.693178 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.693232 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.693245 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700224 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700310 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700378 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700394 4888 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700407 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700422 4888 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700434 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700446 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700459 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc 
kubenswrapper[4888]: I1201 19:33:38.700456 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700474 4888 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700521 4888 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700535 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700546 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700556 4888 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700567 4888 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700575 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700584 4888 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700592 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700602 4888 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700610 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700619 4888 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700627 4888 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700636 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700644 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700653 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700407 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700662 4888 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700705 4888 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700718 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700731 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700744 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700756 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700768 4888 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700780 4888 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700794 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700840 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700856 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700868 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700880 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700894 4888 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700929 4888 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700949 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700963 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.700976 4888 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701011 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701030 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath 
\"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701047 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701079 4888 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701092 4888 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701106 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.700851 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701121 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701176 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" 
(UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701219 4888 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701230 4888 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701242 4888 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701256 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701291 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701301 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701310 4888 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701320 4888 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701328 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701337 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701347 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701375 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701388 4888 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701396 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701405 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701414 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701425 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701452 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701461 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701470 4888 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701479 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701487 4888 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701495 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701503 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701529 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701538 4888 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on 
node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701547 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701556 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701567 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701576 4888 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701584 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701611 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701622 4888 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701640 4888 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701649 4888 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701659 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701687 4888 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701697 4888 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701706 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701715 
4888 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701729 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701739 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701774 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701787 4888 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701798 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701808 4888 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701820 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701849 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701858 4888 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701868 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701880 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701895 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 
19:33:38.701929 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701945 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701967 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701978 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.701991 4888 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702002 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702014 4888 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702037 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702050 4888 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702063 4888 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702076 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702096 4888 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702108 4888 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 
19:33:38.702120 4888 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.702132 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.703640 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.703689 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.703698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.703711 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.703723 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.712018 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: E1201 19:33:38.712245 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.713982 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.714006 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.714014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.714029 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.714060 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.728771 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.744543 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:55038->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.744700 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:55038->192.168.126.11:17697: read: connection reset by peer" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.744553 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:55040->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.745152 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:55040->192.168.126.11:17697: read: connection reset by peer" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.744915 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.745584 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.745608 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.757797 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 19:33:38 crc kubenswrapper[4888]: W1201 19:33:38.758062 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-0503094e9a0c4950dd298064e49c0440c1a858f03920a4ca3f519430c3ebe0c7 WatchSource:0}: Error finding container 0503094e9a0c4950dd298064e49c0440c1a858f03920a4ca3f519430c3ebe0c7: Status 404 returned error can't find the container with id 0503094e9a0c4950dd298064e49c0440c1a858f03920a4ca3f519430c3ebe0c7 Dec 01 19:33:38 crc kubenswrapper[4888]: W1201 19:33:38.766149 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-f3c04e4176aa717fc308d090d3818a195f2b50a2d29633917935d9ba8eeaccb8 WatchSource:0}: Error finding container f3c04e4176aa717fc308d090d3818a195f2b50a2d29633917935d9ba8eeaccb8: Status 404 returned error can't find the container with id f3c04e4176aa717fc308d090d3818a195f2b50a2d29633917935d9ba8eeaccb8 Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.817380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.817410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.817419 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.817443 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.817455 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.852678 4888 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.852757 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.925960 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.925994 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.926005 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.926019 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:38 crc kubenswrapper[4888]: I1201 19:33:38.926030 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:38Z","lastTransitionTime":"2025-12-01T19:33:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.028625 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.028654 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.028661 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.028675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.028684 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.106317 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.106458 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.106528 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.106683 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.106761 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:40.106744509 +0000 UTC m=+19.977774423 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.106941 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.106995 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:33:40.106969885 +0000 UTC m=+19.977999839 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.107021 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:40.107011497 +0000 UTC m=+19.978041521 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.131227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.131288 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.131300 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.131315 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.131346 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.207807 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.207860 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208060 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208076 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208086 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208117 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208125 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208132 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208225 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:40.208162695 +0000 UTC m=+20.079192609 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:39 crc kubenswrapper[4888]: E1201 19:33:39.208243 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:40.208235327 +0000 UTC m=+20.079265241 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.233663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.233705 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.233714 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.233728 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.233736 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.335844 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.335879 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.335888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.335901 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.335909 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.438713 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.438766 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.438777 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.438799 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.438808 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.540895 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.540924 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.540932 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.540945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.540954 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.550120 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f3c04e4176aa717fc308d090d3818a195f2b50a2d29633917935d9ba8eeaccb8"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.551904 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.551955 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.551972 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0503094e9a0c4950dd298064e49c0440c1a858f03920a4ca3f519430c3ebe0c7"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.553349 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.553392 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f172364841c2c1537829dba9bca120a76a779d809c4c8c68316f2bba0a840470"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.555220 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.556639 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976" exitCode=255 Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.556668 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.565479 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.565939 4888 scope.go:117] "RemoveContainer" containerID="cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.565996 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.582840 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.627982 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.643233 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.643263 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.643271 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.643284 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.643292 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.650937 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.664647 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.682740 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.698617 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01
T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.710507 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.723512 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.736856 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.749667 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.749721 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.749735 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.749754 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.749805 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.753398 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.769629 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.789663 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:39Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.851666 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.851698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.851707 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.851721 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.851731 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.866770 4888 csr.go:261] certificate signing request csr-c4nqf is approved, waiting to be issued Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.954279 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.954319 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.954328 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.954345 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.954355 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:39Z","lastTransitionTime":"2025-12-01T19:33:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:39 crc kubenswrapper[4888]: I1201 19:33:39.969879 4888 csr.go:257] certificate signing request csr-c4nqf is issued Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.057042 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.057367 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.057455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.057523 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.057587 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.115636 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.115845 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:33:42.115813136 +0000 UTC m=+21.986843070 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.116055 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.116161 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.116244 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.116302 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.116315 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:42.116304349 +0000 UTC m=+21.987334363 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.116479 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:42.116459724 +0000 UTC m=+21.987489628 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.160299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.160601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.160725 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.160832 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.160970 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.216819 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.216864 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.216970 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.216984 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.216996 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.217049 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-01 19:33:42.217034216 +0000 UTC m=+22.088064130 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.217438 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.217527 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.217624 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.217740 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:42.217724116 +0000 UTC m=+22.088754090 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.263339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.263705 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.263811 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.263900 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.263980 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.313373 4888 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.350249 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.353857 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.366285 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.366313 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.366322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.366335 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.366344 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.372710 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.425127 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.433385 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.450456 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.450529 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.450476 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.450587 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.450683 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.450755 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.454109 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.455602 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.456542 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.457254 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.457923 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.458487 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.459134 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.459736 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.460366 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.460881 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.463908 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.466193 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.467013 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.467554 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468049 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468053 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468074 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468089 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468100 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.468551 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.469050 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.469682 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.470087 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.470728 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.472806 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.473274 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.473825 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.474651 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.475345 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.476715 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.477571 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.479026 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.479536 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.480707 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.481282 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.481828 4888 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.481931 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.485121 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.485871 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.487262 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.489997 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.490819 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.491354 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.492017 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.492683 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.493151 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.493786 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.494268 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.494450 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.495028 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.495495 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.496006 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.496556 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.497314 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.497801 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.498317 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.498776 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.502143 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.502885 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.503735 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.511861 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.536977 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.561351 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.562487 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.564977 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9"}
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.565021 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.570118 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.570162 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.570172 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.570200 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.570209 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:40 crc kubenswrapper[4888]: E1201 19:33:40.571200 4888 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.587519 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.605391 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.618287 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.633860 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.655296 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.671948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.672076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.672087 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.672104 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.672113 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.673061 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.685887 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.697969 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.709012 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.724218 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.747436 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.774362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.774406 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.774419 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.774437 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.774449 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.776404 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.796390 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.801058 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-kjkx6"] Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.803231 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-c5qc6"] Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.803537 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.805140 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-jcmzp"] Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.805450 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.806045 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-hfpdh"] Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.807027 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.807657 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.808347 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.808440 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.808889 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.811788 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.811865 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812073 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812139 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812151 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812296 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812329 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812337 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812441 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812506 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812649 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.812760 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.814807 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820673 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-socket-dir-parent\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820720 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-bin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820747 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820770 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-kubelet\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820792 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmq8v\" (UniqueName: \"kubernetes.io/projected/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-kube-api-access-wmq8v\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820852 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlp7b\" (UniqueName: \"kubernetes.io/projected/e511a2b8-6ef4-4788-9975-1801322e1d9d-kube-api-access-nlp7b\") pod \"node-resolver-kjkx6\" (UID: \"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820875 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6a551e8a-d979-4cdb-87f5-1075b0b49a36-mcd-auth-proxy-config\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820895 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cni-binary-copy\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820914 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-multus-certs\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820934 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-system-cni-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820953 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-multus\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820978 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-system-cni-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.820998 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821043 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-hostroot\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821061 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-daemon-config\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821089 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e511a2b8-6ef4-4788-9975-1801322e1d9d-hosts-file\") pod \"node-resolver-kjkx6\" (UID: \"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821112 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6k46\" (UniqueName: \"kubernetes.io/projected/d432d9cf-070c-4c1f-997e-481b0087a5a4-kube-api-access-q6k46\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821137 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821158 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cnibin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821196 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-cnibin\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821220 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-conf-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 
crc kubenswrapper[4888]: I1201 19:33:40.821242 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-etc-kubernetes\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821282 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-netns\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821304 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-os-release\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821325 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6a551e8a-d979-4cdb-87f5-1075b0b49a36-proxy-tls\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821361 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2b6j\" (UniqueName: \"kubernetes.io/projected/6a551e8a-d979-4cdb-87f5-1075b0b49a36-kube-api-access-d2b6j\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821395 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-os-release\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821481 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-cni-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821508 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-k8s-cni-cncf-io\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.821541 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6a551e8a-d979-4cdb-87f5-1075b0b49a36-rootfs\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " 
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.829336 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.846391 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.859431 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.869977 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.876603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.876626 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.876634 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.876646 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.876657 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.882306 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.895661 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.911460 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922498 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e511a2b8-6ef4-4788-9975-1801322e1d9d-hosts-file\") pod \"node-resolver-kjkx6\" (UID: \"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922566 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-q6k46\" (UniqueName: \"kubernetes.io/projected/d432d9cf-070c-4c1f-997e-481b0087a5a4-kube-api-access-q6k46\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922591 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/e511a2b8-6ef4-4788-9975-1801322e1d9d-hosts-file\") pod \"node-resolver-kjkx6\" (UID: \"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922606 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922634 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cnibin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922658 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-conf-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922681 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-cnibin\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922703 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-netns\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922723 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-etc-kubernetes\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922743 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-os-release\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922751 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cnibin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922764 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6a551e8a-d979-4cdb-87f5-1075b0b49a36-proxy-tls\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922817 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2b6j\" (UniqueName: \"kubernetes.io/projected/6a551e8a-d979-4cdb-87f5-1075b0b49a36-kube-api-access-d2b6j\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922837 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-os-release\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922840 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-conf-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922853 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6a551e8a-d979-4cdb-87f5-1075b0b49a36-rootfs\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922870 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-cnibin\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922870 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-cni-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922894 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-k8s-cni-cncf-io\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922910 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-cni-dir\") pod \"multus-hfpdh\" (UID: 
\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922913 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922936 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-socket-dir-parent\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922949 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-bin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922971 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-kubelet\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922987 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmq8v\" (UniqueName: \"kubernetes.io/projected/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-kube-api-access-wmq8v\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923017 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlp7b\" (UniqueName: \"kubernetes.io/projected/e511a2b8-6ef4-4788-9975-1801322e1d9d-kube-api-access-nlp7b\") pod \"node-resolver-kjkx6\" (UID: \"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923033 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6a551e8a-d979-4cdb-87f5-1075b0b49a36-mcd-auth-proxy-config\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923048 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cni-binary-copy\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.922819 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-netns\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 
19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923061 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-system-cni-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923075 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-multus-certs\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923078 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923089 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-system-cni-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923109 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-os-release\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923119 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-k8s-cni-cncf-io\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923128 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-multus\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923145 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6a551e8a-d979-4cdb-87f5-1075b0b49a36-rootfs\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923151 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-hostroot\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923104 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-system-cni-dir\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923173 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923221 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-daemon-config\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923353 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-socket-dir-parent\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923390 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-bin\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923417 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-kubelet\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923442 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-etc-kubernetes\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923611 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d432d9cf-070c-4c1f-997e-481b0087a5a4-os-release\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923826 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923886 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-system-cni-dir\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923892 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-multus-daemon-config\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923916 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-run-multus-certs\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923936 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-host-var-lib-cni-multus\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.923950 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-hostroot\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.924054 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-cni-binary-copy\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.924250 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6a551e8a-d979-4cdb-87f5-1075b0b49a36-mcd-auth-proxy-config\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.924402 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d432d9cf-070c-4c1f-997e-481b0087a5a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.926642 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.927796 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6a551e8a-d979-4cdb-87f5-1075b0b49a36-proxy-tls\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.939835 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6k46\" (UniqueName: \"kubernetes.io/projected/d432d9cf-070c-4c1f-997e-481b0087a5a4-kube-api-access-q6k46\") pod \"multus-additional-cni-plugins-c5qc6\" (UID: \"d432d9cf-070c-4c1f-997e-481b0087a5a4\") " pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.941944 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlp7b\" (UniqueName: \"kubernetes.io/projected/e511a2b8-6ef4-4788-9975-1801322e1d9d-kube-api-access-nlp7b\") pod \"node-resolver-kjkx6\" (UID: 
\"e511a2b8-6ef4-4788-9975-1801322e1d9d\") " pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.943024 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2b6j\" (UniqueName: \"kubernetes.io/projected/6a551e8a-d979-4cdb-87f5-1075b0b49a36-kube-api-access-d2b6j\") pod \"machine-config-daemon-jcmzp\" (UID: \"6a551e8a-d979-4cdb-87f5-1075b0b49a36\") " pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.943211 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmq8v\" (UniqueName: \"kubernetes.io/projected/08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6-kube-api-access-wmq8v\") pod \"multus-hfpdh\" (UID: \"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\") " pod="openshift-multus/multus-hfpdh" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.948906 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.964481 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.971477 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-01 19:28:39 +0000 UTC, rotation deadline is 2026-10-01 07:05:13.355600715 +0000 UTC Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.971550 4888 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7283h31m32.384053061s for next certificate rotation Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.976308 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.978603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.978728 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.978800 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.978863 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.978923 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:40Z","lastTransitionTime":"2025-12-01T19:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:40 crc kubenswrapper[4888]: I1201 19:33:40.994781 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.007859 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.022819 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.081150 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.081210 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.081221 4888 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.081235 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.081244 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.119555 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kjkx6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.124718 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" Dec 01 19:33:41 crc kubenswrapper[4888]: W1201 19:33:41.133294 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode511a2b8_6ef4_4788_9975_1801322e1d9d.slice/crio-709fc834da1d9b3f2d9a7670599ea9fe7a82656ff78b6dc670180e883dee70a1 WatchSource:0}: Error finding container 709fc834da1d9b3f2d9a7670599ea9fe7a82656ff78b6dc670180e883dee70a1: Status 404 returned error can't find the container with id 709fc834da1d9b3f2d9a7670599ea9fe7a82656ff78b6dc670180e883dee70a1 Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.133780 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.138091 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hfpdh" Dec 01 19:33:41 crc kubenswrapper[4888]: W1201 19:33:41.157619 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a551e8a_d979_4cdb_87f5_1075b0b49a36.slice/crio-ce2a9965280da6c81c5ce1567dd9a322e13b71f33778afdd92ba34cc36bd6793 WatchSource:0}: Error finding container ce2a9965280da6c81c5ce1567dd9a322e13b71f33778afdd92ba34cc36bd6793: Status 404 returned error can't find the container with id ce2a9965280da6c81c5ce1567dd9a322e13b71f33778afdd92ba34cc36bd6793 Dec 01 19:33:41 crc kubenswrapper[4888]: W1201 19:33:41.162209 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08a0b7d8_c2e5_4053_a3d7_b4bcdf604ea6.slice/crio-ad5f3ed537cdcf4059dd975d8a7b519195391891dee079096512715f774bd42d WatchSource:0}: Error finding container ad5f3ed537cdcf4059dd975d8a7b519195391891dee079096512715f774bd42d: Status 404 returned error can't find the container with id ad5f3ed537cdcf4059dd975d8a7b519195391891dee079096512715f774bd42d Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.163933 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f4wj6"] Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.164984 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.167820 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.167994 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.168313 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.168576 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.168732 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.169127 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.169276 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.178011 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.183464 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.183496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.183505 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.183519 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.183531 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.192653 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.211640 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225606 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225655 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225678 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225714 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225735 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225757 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225784 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225814 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225835 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225854 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225876 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbxc5\" (UniqueName: \"kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225894 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225911 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225928 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225947 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225975 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.225994 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 
19:33:41.226011 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.226027 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.226045 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.231733 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 
19:33:41.255399 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.290009 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.297968 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.298026 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.298035 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.298054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.298080 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.319116 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327053 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327089 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327105 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327120 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327143 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327160 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327175 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327205 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327225 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327241 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327256 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327271 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327293 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327309 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327326 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327347 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327370 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327385 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327403 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327430 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbxc5\" (UniqueName: \"kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.327696 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328383 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328432 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328457 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328477 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328497 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.328793 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329308 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329386 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329415 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329475 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329479 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329499 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329519 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329526 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329545 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.329571 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f4wj6\" (UID: 
\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.330199 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.337764 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.337931 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 
19:33:41.350935 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.352961 4888 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-tbxc5\" (UniqueName: \"kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5\") pod \"ovnkube-node-f4wj6\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.368478 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.380518 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.392656 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.402065 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.402106 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.402119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.402136 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.402150 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.417096 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.479440 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:41 crc kubenswrapper[4888]: W1201 19:33:41.491196 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod578ef97f_2ce3_405a_9f4e_fcaa5f98df07.slice/crio-b0311eeaa7a47871b5e23b4f6bbaed96b2b8aa682be4987282edb20a2c4e2d26 WatchSource:0}: Error finding container b0311eeaa7a47871b5e23b4f6bbaed96b2b8aa682be4987282edb20a2c4e2d26: Status 404 returned error can't find the container with id b0311eeaa7a47871b5e23b4f6bbaed96b2b8aa682be4987282edb20a2c4e2d26 Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.504750 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.504797 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.504811 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.504827 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.504837 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.568483 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.568538 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"b0311eeaa7a47871b5e23b4f6bbaed96b2b8aa682be4987282edb20a2c4e2d26"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.571373 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.571405 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.571416 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"ce2a9965280da6c81c5ce1567dd9a322e13b71f33778afdd92ba34cc36bd6793"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.573354 4888 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-dns/node-resolver-kjkx6" event={"ID":"e511a2b8-6ef4-4788-9975-1801322e1d9d","Type":"ContainerStarted","Data":"4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.573427 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kjkx6" event={"ID":"e511a2b8-6ef4-4788-9975-1801322e1d9d","Type":"ContainerStarted","Data":"709fc834da1d9b3f2d9a7670599ea9fe7a82656ff78b6dc670180e883dee70a1"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.578590 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerStarted","Data":"4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.578636 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerStarted","Data":"ad5f3ed537cdcf4059dd975d8a7b519195391891dee079096512715f774bd42d"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.580281 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69" exitCode=0 Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.580327 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.580343 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerStarted","Data":"705a95f474842c7d5d5075ff32e24172da1aeaf7e3302c2bbea1a8ba152d5983"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.581916 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.589195 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.603057 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.607665 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.607706 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.607715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.607729 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.607740 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.621647 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z 
is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.635565 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.646691 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.659360 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.674655 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.686442 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.698918 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.710517 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.710547 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.710555 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.710568 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.710577 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.721957 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.737882 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.747684 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.758229 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.773237 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.783974 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.793437 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.805133 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.812886 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.812923 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.812933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.812948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.812958 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.823097 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.836693 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.848036 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.864621 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.875707 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.888498 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc 
kubenswrapper[4888]: I1201 19:33:41.899984 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.911375 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.915552 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.915576 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.915585 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.915599 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.915608 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:41Z","lastTransitionTime":"2025-12-01T19:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:41 crc kubenswrapper[4888]: I1201 19:33:41.922300 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:41Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.017729 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.017763 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.017772 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.017787 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.017797 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.120583 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.120979 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.120994 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.121012 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.121024 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.136465 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.136596 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.136719 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:33:46.136692758 +0000 UTC m=+26.007722692 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.136776 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.136835 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.136858 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:46.136836362 +0000 UTC m=+26.007866286 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.136953 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.137005 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:46.136992657 +0000 UTC m=+26.008022571 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.222584 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.222630 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.222641 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.222659 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.222672 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.237358 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.237413 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237550 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237569 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237582 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237593 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 
19:33:42.237621 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237632 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237640 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:46.23762366 +0000 UTC m=+26.108653574 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.237684 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:46.237665642 +0000 UTC m=+26.108695556 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.325953 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.325983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.325993 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.326006 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.326024 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.429237 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.429594 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.429608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.429623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.429640 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.451339 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.451391 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.451339 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.451493 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.451546 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:42 crc kubenswrapper[4888]: E1201 19:33:42.451625 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.531870 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.531904 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.531913 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.531926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.531935 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.590054 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d" exitCode=0 Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.590115 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597243 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" exitCode=0 Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597394 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597577 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597606 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597619 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597633 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.597645 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.602364 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/ope
nshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.617171 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.629666 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.633874 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.633900 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.633908 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.633922 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.633931 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.640799 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.651141 4888 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 
19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.664542 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.677371 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.693417 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.707821 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.719944 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.731678 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.735603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.735627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.735637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.735649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.735658 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.743427 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.753389 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:42Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.837912 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.837956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.837969 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.837988 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.838001 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.940899 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.940941 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.940953 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.940967 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:42 crc kubenswrapper[4888]: I1201 19:33:42.940976 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:42Z","lastTransitionTime":"2025-12-01T19:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.042798 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.042854 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.042865 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.042886 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.042901 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.144916 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.144953 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.144962 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.144995 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.145004 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.248096 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.248142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.248153 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.248169 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.248240 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.351435 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.351469 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.351479 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.351495 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.351503 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.454255 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.454293 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.454305 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.454320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.454330 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.556538 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.556569 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.556578 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.556593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.556604 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.603048 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.605406 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b" exitCode=0 Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.605439 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.617440 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.631020 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.644002 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.658575 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.658611 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.658619 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.658633 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.658642 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.660905 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.671665 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.683833 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.697130 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.708082 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.718456 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.732249 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.743929 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.758893 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.761244 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.761280 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.761289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.761302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.761312 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.777862 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:43Z 
is after 2025-08-24T17:21:41Z" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.863330 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.863367 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.863377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.863399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.863408 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.965901 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.965944 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.965960 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.965980 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:43 crc kubenswrapper[4888]: I1201 19:33:43.965993 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:43Z","lastTransitionTime":"2025-12-01T19:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.068289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.068347 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.068356 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.068370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.068379 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.105680 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-tqpk6"] Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.106165 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-tqpk6" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.109705 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.109724 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.109722 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.110400 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.123490 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.138604 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.155699 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.156319 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e63626be-5025-460a-85bd-236bf6ece71a-serviceca\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.156365 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks8wk\" (UniqueName: \"kubernetes.io/projected/e63626be-5025-460a-85bd-236bf6ece71a-kube-api-access-ks8wk\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.156413 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63626be-5025-460a-85bd-236bf6ece71a-host\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.170993 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.171746 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.171775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.171784 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.171822 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.171831 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.182958 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.196925 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.210426 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.223420 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.233760 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.244845 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.253604 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.257848 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e63626be-5025-460a-85bd-236bf6ece71a-serviceca\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.257906 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks8wk\" (UniqueName: \"kubernetes.io/projected/e63626be-5025-460a-85bd-236bf6ece71a-kube-api-access-ks8wk\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.257950 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63626be-5025-460a-85bd-236bf6ece71a-host\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.258007 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e63626be-5025-460a-85bd-236bf6ece71a-host\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.259137 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e63626be-5025-460a-85bd-236bf6ece71a-serviceca\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.264534 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.275081 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.275127 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.275140 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.275157 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.275169 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.276220 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.276861 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks8wk\" (UniqueName: \"kubernetes.io/projected/e63626be-5025-460a-85bd-236bf6ece71a-kube-api-access-ks8wk\") pod \"node-ca-tqpk6\" (UID: \"e63626be-5025-460a-85bd-236bf6ece71a\") " pod="openshift-image-registry/node-ca-tqpk6"
Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.293672 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z 
is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.377281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.377334 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.377345 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.377361 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.377374 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.428177 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-tqpk6" Dec 01 19:33:44 crc kubenswrapper[4888]: W1201 19:33:44.443632 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode63626be_5025_460a_85bd_236bf6ece71a.slice/crio-1b681347dffca8706752be3e6f48489e24acf3bbdfd22fee89367000b49f7a85 WatchSource:0}: Error finding container 1b681347dffca8706752be3e6f48489e24acf3bbdfd22fee89367000b49f7a85: Status 404 returned error can't find the container with id 1b681347dffca8706752be3e6f48489e24acf3bbdfd22fee89367000b49f7a85 Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.450759 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.450845 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.450882 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:44 crc kubenswrapper[4888]: E1201 19:33:44.450927 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:44 crc kubenswrapper[4888]: E1201 19:33:44.451013 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:44 crc kubenswrapper[4888]: E1201 19:33:44.451149 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.479931 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.479967 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.479975 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.479989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.479999 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.582037 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.582072 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.582081 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.582095 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.582104 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.609418 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tqpk6" event={"ID":"e63626be-5025-460a-85bd-236bf6ece71a","Type":"ContainerStarted","Data":"1b681347dffca8706752be3e6f48489e24acf3bbdfd22fee89367000b49f7a85"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.611567 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6" exitCode=0 Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.611593 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.624914 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.663542 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.675766 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.686486 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.686524 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.686534 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.686548 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.686570 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.688437 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.701920 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.713024 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.724971 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.745767 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.758391 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.771131 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.785026 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.788364 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.788475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.788549 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.788613 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.788752 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.799907 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.812172 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.824358 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:44Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.891013 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.891048 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.891060 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.891077 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.891087 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.992569 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.992608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.992619 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.992636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:44 crc kubenswrapper[4888]: I1201 19:33:44.992645 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:44Z","lastTransitionTime":"2025-12-01T19:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.095014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.095047 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.095056 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.095071 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.095079 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.197185 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.197242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.197252 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.197268 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.197280 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.299567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.299597 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.299606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.299618 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.299627 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.402894 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.402944 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.402956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.402972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.402983 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.507675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.507956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.508020 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.508093 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.508157 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.611831 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.611863 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.611871 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.611884 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.611894 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.615697 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tqpk6" event={"ID":"e63626be-5025-460a-85bd-236bf6ece71a","Type":"ContainerStarted","Data":"a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.620563 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.623806 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108" exitCode=0 Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.623863 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.629184 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.643939 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.668929 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z 
is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.679149 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.690423 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.701858 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.714478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.714509 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.714519 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.714535 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.714546 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.721090 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:
33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.740253 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.755340 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.769280 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.781203 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.793195 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.806726 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.817932 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.817989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.818004 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.818023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.818037 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.826158 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.842039 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.854291 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.870130 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.885579 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.897215 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.914987 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.920015 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.920062 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.920074 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.920095 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.920122 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:45Z","lastTransitionTime":"2025-12-01T19:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.927214 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.939449 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.952611 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.966066 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.978059 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:45 crc kubenswrapper[4888]: I1201 19:33:45.989238 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:45Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.007071 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z 
is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.016358 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.022175 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.022320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.022338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.022361 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.022379 4888 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.125108 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.125161 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.125173 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.125213 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.125223 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.181178 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.181286 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.181268272 +0000 UTC m=+34.052298186 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.181316 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.181365 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.181439 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.181467 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.181497 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.181485518 +0000 UTC m=+34.052515432 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.181511 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.181505249 +0000 UTC m=+34.052535153 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.227146 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.227197 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.227206 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.227220 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.227229 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.282203 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.282283 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282399 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282416 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282418 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282460 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282475 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282440 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282546 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.282531414 +0000 UTC m=+34.153561328 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.282619 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.282604466 +0000 UTC m=+34.153634380 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.329730 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.329775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.329784 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.329798 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.329808 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.434413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.434482 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.434493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.434512 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.434532 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.450418 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.450511 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.450423 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.450583 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.450669 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:33:46 crc kubenswrapper[4888]: E1201 19:33:46.450776 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.537053 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.537084 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.537094 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.537109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.537121 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.630250 4888 generic.go:334] "Generic (PLEG): container finished" podID="d432d9cf-070c-4c1f-997e-481b0087a5a4" containerID="ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d" exitCode=0
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.630321 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerDied","Data":"ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d"}
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.638998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.639042 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.639054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.639073 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.639086 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.656301 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.668972 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.679537 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.690550 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.703396 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.714208 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.725825 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.741635 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.741672 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.741687 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.741715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.741738 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.742331 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a3861
9c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.751828 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.763688 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.776532 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.789590 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.800956 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.812076 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:46Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.844340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.844380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.844390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.844408 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.844422 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.946711 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.946752 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.946769 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.946788 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:46 crc kubenswrapper[4888]: I1201 19:33:46.946798 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:46Z","lastTransitionTime":"2025-12-01T19:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.049343 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.049391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.049404 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.049421 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.049435 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.152091 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.152120 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.152128 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.152141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.152149 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.254462 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.254497 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.254508 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.254525 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.254537 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.356333 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.356379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.356402 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.356424 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.356439 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.459086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.459372 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.459385 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.459401 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.459412 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.562176 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.562245 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.562257 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.562277 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.562297 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.636440 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.637448 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.637499 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.642419 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" event={"ID":"d432d9cf-070c-4c1f-997e-481b0087a5a4","Type":"ContainerStarted","Data":"fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.651594 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.660701 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.664427 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.664461 4888 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.664470 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.664489 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.664507 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.665683 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" 
Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.666661 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.678046 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.690081 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready
\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.704876 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.717875 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.729651 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.746676 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\
\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.755855 4888 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766554 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766586 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766613 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766624 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.766666 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.778952 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.791575 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.803957 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.815747 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.827567 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.838103 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.853092 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.865330 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.868310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.868353 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.868363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.868379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.868390 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.874246 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.886565 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.897263 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.906865 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.917113 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.927977 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.939254 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.951570 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.970345 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.970390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.970403 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.970422 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.970434 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:47Z","lastTransitionTime":"2025-12-01T19:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.976209 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:47 crc kubenswrapper[4888]: I1201 19:33:47.993011 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:47Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.073418 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.073586 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.073594 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.073606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.073616 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.175388 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.175428 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.175438 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.175454 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.175465 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.277843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.277906 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.277919 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.277935 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.277944 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.380119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.380156 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.380166 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.380200 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.380210 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.450367 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.450408 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.450419 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.450504 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.450599 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.450663 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.482357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.482390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.482400 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.482417 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.482427 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.583936 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.583964 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.583972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.583984 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.583993 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.644983 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.686135 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.686173 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.686195 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.686210 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.686219 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.788476 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.788518 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.788532 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.788552 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.788567 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.891060 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.891119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.891135 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.891154 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.891166 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.919822 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.919873 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.919887 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.919905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.919918 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.934058 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:48Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.937637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.937695 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.937712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.937735 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.937750 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.956455 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:48Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.960230 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.960266 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.960278 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.960298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.960309 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.974066 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:48Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.977876 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.977902 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.977912 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.977925 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.977936 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:48 crc kubenswrapper[4888]: E1201 19:33:48.992155 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:48Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.996053 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.996137 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.996149 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.996171 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:48 crc kubenswrapper[4888]: I1201 19:33:48.996203 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:48Z","lastTransitionTime":"2025-12-01T19:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: E1201 19:33:49.012707 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:49Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:49 crc kubenswrapper[4888]: E1201 19:33:49.012826 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.014692 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.014742 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.014756 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.014776 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.014789 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.117362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.117410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.117431 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.117450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.117463 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.219057 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.219096 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.219109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.219125 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.219137 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.321236 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.321315 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.321329 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.321344 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.321354 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.424076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.424124 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.424136 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.424152 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.424163 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.526484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.526516 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.526525 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.526539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.526551 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.628934 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.628972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.628987 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.629002 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.629013 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.647556 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.731309 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.731348 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.731360 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.731376 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.731387 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.833334 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.833394 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.833406 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.833425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.833439 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.935701 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.935742 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.935755 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.935772 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:49 crc kubenswrapper[4888]: I1201 19:33:49.935783 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:49Z","lastTransitionTime":"2025-12-01T19:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.038653 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.038704 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.038715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.038731 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.038742 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.140888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.140920 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.140930 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.140945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.140955 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.243907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.243953 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.243965 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.243982 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.243996 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.346253 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.346326 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.346339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.346355 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.346364 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.449466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.449558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.449578 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.449621 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.449643 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.450649 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.450662 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:50 crc kubenswrapper[4888]: E1201 19:33:50.450763 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.450643 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:50 crc kubenswrapper[4888]: E1201 19:33:50.450926 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:50 crc kubenswrapper[4888]: E1201 19:33:50.451160 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.470935 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.495084 
4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.522147 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.545277 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.551938 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.551970 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.551983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.551998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.552011 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.563659 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.576991 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.590532 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.605916 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.619528 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.632889 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.644862 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.653570 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.653600 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.653608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.653621 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.653631 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.654257 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/0.log" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.657901 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f" exitCode=1 Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.657988 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.658819 4888 scope.go:117] "RemoveContainer" containerID="e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.664126 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.675305 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.691681 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.704431 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.717298 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\"
:\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.730103 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-o
perator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.743239 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.756722 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.758429 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.758467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.758479 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.758494 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.758506 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.770493 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.786046 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d
04880b5a00725e47855cd41f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.795161 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\
\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.804972 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.816439 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.827884 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.838982 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.849504 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.858822 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:50Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.860289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.860321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.860333 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.860349 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.860360 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.963032 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.963088 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.963113 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.963134 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:50 crc kubenswrapper[4888]: I1201 19:33:50.963149 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:50Z","lastTransitionTime":"2025-12-01T19:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.065824 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.065864 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.065875 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.065891 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.065900 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.168795 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.168840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.168851 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.168868 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.168879 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.271081 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.271128 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.271145 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.271167 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.271189 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.382935 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.382989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.382998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.383011 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.383029 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.485210 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.485262 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.485276 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.485294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.485306 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.587310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.587351 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.587363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.587380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.587391 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.663111 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/0.log" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.665727 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3"} Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.665850 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.681243 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"run
ning\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.689442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.689475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.689484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.689498 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.689513 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.696331 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.705963 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.716492 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.727371 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6a
ae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.737230 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.748154 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.763922 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe68166
3d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.772508 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.783003 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.791624 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.791663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.791675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.791691 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.791703 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.793404 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.805671 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.816880 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.828967 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.893321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.893572 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.893636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.893720 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:51 crc kubenswrapper[4888]: I1201 19:33:51.893806 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:51Z","lastTransitionTime":"2025-12-01T19:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.001616 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.001873 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.001952 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.002010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.002068 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.103980 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.104241 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.104415 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.104615 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.104773 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.207379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.207606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.207667 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.207739 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.207896 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.311109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.311444 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.311565 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.311657 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.311744 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.413877 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.413928 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.413942 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.413959 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.413974 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.451049 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:52 crc kubenswrapper[4888]: E1201 19:33:52.451156 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.451407 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.451523 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:52 crc kubenswrapper[4888]: E1201 19:33:52.451662 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:52 crc kubenswrapper[4888]: E1201 19:33:52.451532 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.516857 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.516895 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.516904 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.516918 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.516926 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.619478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.619516 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.619527 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.619544 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.619555 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.669487 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/1.log" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.670055 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/0.log" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.672499 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3" exitCode=1 Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.672531 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.672571 4888 scope.go:117] "RemoveContainer" containerID="e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.673979 4888 scope.go:117] "RemoveContainer" containerID="5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3" Dec 01 19:33:52 crc kubenswrapper[4888]: E1201 19:33:52.674282 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.687437 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.700225 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.713755 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.723988 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.724229 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.724253 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.724262 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.724306 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.724319 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.734255 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.744156 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.757102 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.772573 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.780784 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.791469 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.801932 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.811823 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.821581 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9"] Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.822051 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.823505 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.824548 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826277 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826421 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826437 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826448 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.826465 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.837084 4888 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.848452 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.864028 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.871840 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.881464 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.893492 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.905103 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.915981 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.926771 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.928106 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.928300 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.928489 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.928796 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.929006 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:52Z","lastTransitionTime":"2025-12-01T19:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.936448 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.943708 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.943756 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.943800 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovnkube-config\") pod 
\"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.943842 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5mmn\" (UniqueName: \"kubernetes.io/projected/79f63bfb-e11c-4a38-a47f-3162cca30e66-kube-api-access-s5mmn\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.944770 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.953636 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.963608 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.974562 4888 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.986260 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:52 crc kubenswrapper[4888]: I1201 19:33:52.999794 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:52Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.031810 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.032016 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.032109 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.032172 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.032266 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.044506 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.044710 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.044853 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.044985 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5mmn\" (UniqueName: \"kubernetes.io/projected/79f63bfb-e11c-4a38-a47f-3162cca30e66-kube-api-access-s5mmn\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.045266 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-env-overrides\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.045425 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.049601 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/79f63bfb-e11c-4a38-a47f-3162cca30e66-ovn-control-plane-metrics-cert\") 
pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.060164 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5mmn\" (UniqueName: \"kubernetes.io/projected/79f63bfb-e11c-4a38-a47f-3162cca30e66-kube-api-access-s5mmn\") pod \"ovnkube-control-plane-749d76644c-sttz9\" (UID: \"79f63bfb-e11c-4a38-a47f-3162cca30e66\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.133384 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.134658 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.134767 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.134849 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.134926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.134997 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: W1201 19:33:53.144391 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79f63bfb_e11c_4a38_a47f_3162cca30e66.slice/crio-9e0d72d605da9862722024ca91ac8e6fe76549c8aad3ae36004ff3c2cdcea186 WatchSource:0}: Error finding container 9e0d72d605da9862722024ca91ac8e6fe76549c8aad3ae36004ff3c2cdcea186: Status 404 returned error can't find the container with id 9e0d72d605da9862722024ca91ac8e6fe76549c8aad3ae36004ff3c2cdcea186 Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.237467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.237511 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.237522 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.237537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.237547 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.339713 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.339744 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.339752 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.339765 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.339773 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.441991 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.442020 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.442027 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.442040 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.442048 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.545590 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.545690 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.545712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.545747 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.545771 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.648684 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.648725 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.648736 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.648752 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.648764 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.677168 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" event={"ID":"79f63bfb-e11c-4a38-a47f-3162cca30e66","Type":"ContainerStarted","Data":"d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.677221 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" event={"ID":"79f63bfb-e11c-4a38-a47f-3162cca30e66","Type":"ContainerStarted","Data":"92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.677232 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" event={"ID":"79f63bfb-e11c-4a38-a47f-3162cca30e66","Type":"ContainerStarted","Data":"9e0d72d605da9862722024ca91ac8e6fe76549c8aad3ae36004ff3c2cdcea186"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.678741 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/1.log" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.690885 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.705732 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.719369 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.730278 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.750819 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.750848 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.750988 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.750998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.751012 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.751022 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.764926 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.774595 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.785633 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.796569 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.808292 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.820439 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.830848 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.841432 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.853633 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.853660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.853669 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.853685 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.853697 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.859463 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe68166
3d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped 
ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.870109 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:53Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.955718 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.955753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.955761 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.955775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:53 crc kubenswrapper[4888]: I1201 19:33:53.955784 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:53Z","lastTransitionTime":"2025-12-01T19:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.058712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.058807 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.058827 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.058860 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.058888 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.161250 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.161285 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.161293 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.161306 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.161316 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.256883 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.257072 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.257030849 +0000 UTC m=+50.128060783 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.257272 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.257352 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.257437 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.257466 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.257499 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.257484482 +0000 UTC m=+50.128514396 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.257519 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.257506453 +0000 UTC m=+50.128536377 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.263406 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.263450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.263462 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.263484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.263508 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.298344 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gb7nn"] Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.299167 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.299248 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.310666 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.326299 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.342088 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.357653 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.357802 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 
19:33:54.357841 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwlh5\" (UniqueName: \"kubernetes.io/projected/4a71b974-d433-46e2-904d-2d955ba74014-kube-api-access-hwlh5\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.357864 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.357889 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.357945 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.357965 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.357968 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.357976 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.357990 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.358002 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.358026 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.358013333 +0000 UTC m=+50.229043247 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.358039 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.358033284 +0000 UTC m=+50.229063198 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.365369 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.365402 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.365415 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.365430 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.365442 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.368292 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.378670 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.388506 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.399051 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.413805 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.429387 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped 
ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.438506 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.450443 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.450444 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.450561 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.450653 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.450789 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.450936 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.451803 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.458270 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwlh5\" (UniqueName: \"kubernetes.io/projected/4a71b974-d433-46e2-904d-2d955ba74014-kube-api-access-hwlh5\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.458322 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.458428 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.458474 4888 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:54.958462012 +0000 UTC m=+34.829491916 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.466481 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.467539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.467579 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.467590 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.467604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.467613 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.479511 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.480154 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwlh5\" (UniqueName: \"kubernetes.io/projected/4a71b974-d433-46e2-904d-2d955ba74014-kube-api-access-hwlh5\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.496475 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.506322 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:54Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.569862 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.569905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.569917 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.569935 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.569949 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.671862 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.671905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.671914 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.671931 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.671943 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.774710 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.774744 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.774753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.774766 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.774775 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.877133 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.877168 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.877175 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.877203 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.877212 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.962324 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.962491 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: E1201 19:33:54.962557 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:55.962538888 +0000 UTC m=+35.833568792 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.979436 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.979483 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.979494 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.979508 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:54 crc kubenswrapper[4888]: I1201 19:33:54.979518 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:54Z","lastTransitionTime":"2025-12-01T19:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.081758 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.081956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.082046 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.082117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.082220 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.184998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.185043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.185054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.185069 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.185079 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.288400 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.288472 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.288490 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.288513 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.288530 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.392118 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.392239 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.392267 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.392295 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.392327 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.450686 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:55 crc kubenswrapper[4888]: E1201 19:33:55.450872 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.494803 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.494835 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.494843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.494855 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.494863 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.598124 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.598519 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.598663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.598850 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.599000 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.701864 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.702101 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.702219 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.702330 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.702418 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.804774 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.804813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.804825 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.804841 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.804852 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.907234 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.907277 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.907286 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.907303 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.907312 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:55Z","lastTransitionTime":"2025-12-01T19:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:55 crc kubenswrapper[4888]: I1201 19:33:55.973015 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:55 crc kubenswrapper[4888]: E1201 19:33:55.973159 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:55 crc kubenswrapper[4888]: E1201 19:33:55.973243 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:33:57.973223051 +0000 UTC m=+37.844252965 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.009720 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.009764 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.009781 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.009802 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.009814 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.112431 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.112466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.112476 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.112491 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.112502 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.214601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.214734 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.214746 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.214764 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.214775 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.317788 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.317845 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.317864 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.317888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.317904 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.419883 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.420282 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.420436 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.420578 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.420709 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.450730 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.451004 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.450749 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:56 crc kubenswrapper[4888]: E1201 19:33:56.451425 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:56 crc kubenswrapper[4888]: E1201 19:33:56.451023 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:56 crc kubenswrapper[4888]: E1201 19:33:56.451689 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.526388 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.526450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.526468 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.526492 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.526509 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.628866 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.628926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.628943 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.628961 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.628974 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.731888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.731927 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.731937 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.731952 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.731963 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.834632 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.834677 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.834689 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.834707 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.834723 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.937211 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.937248 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.937257 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.937270 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:56 crc kubenswrapper[4888]: I1201 19:33:56.937279 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:56Z","lastTransitionTime":"2025-12-01T19:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.039690 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.039752 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.039770 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.039795 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.039813 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.142467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.142527 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.142543 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.142567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.142621 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.245117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.245180 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.245221 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.245239 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.245250 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.348352 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.348399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.348411 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.348430 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.348442 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.450215 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:57 crc kubenswrapper[4888]: E1201 19:33:57.450393 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.451874 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.452130 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.452162 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.452234 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.452265 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.555175 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.555281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.555302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.555335 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.555358 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.658580 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.658657 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.658677 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.658704 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.658729 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.763010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.763070 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.763083 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.763103 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.763116 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.865533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.865572 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.865586 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.865601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.865610 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.969265 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.969306 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.969315 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.969345 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.969355 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:57Z","lastTransitionTime":"2025-12-01T19:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:57 crc kubenswrapper[4888]: I1201 19:33:57.994003 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:57 crc kubenswrapper[4888]: E1201 19:33:57.994169 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:57 crc kubenswrapper[4888]: E1201 19:33:57.994237 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:01.994219975 +0000 UTC m=+41.865249889 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.071733 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.071782 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.071794 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.071810 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.071821 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.174425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.174475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.174492 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.174516 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.174537 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.277331 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.277364 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.277374 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.277390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.277401 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.379988 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.380042 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.380055 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.380074 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.380086 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.450358 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.450372 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:33:58 crc kubenswrapper[4888]: E1201 19:33:58.450507 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.450392 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:33:58 crc kubenswrapper[4888]: E1201 19:33:58.450708 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:33:58 crc kubenswrapper[4888]: E1201 19:33:58.450827 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.482259 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.482325 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.482340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.482358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.482375 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.586235 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.586544 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.586937 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.587368 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.587678 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.691023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.691403 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.691595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.691778 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.692134 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.796408 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.796917 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797018 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797113 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797215 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
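The NodeNotReady records above all trace back to one condition: kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/ and keeps the node's Ready condition False until the network provider writes one. A minimal sketch of that directory check, in Go with only the standard library; this illustrates the logged condition, not kubelet's own implementation:

// cni_ready_check.go - a minimal sketch of the readiness test the records
// above reflect: the node stays NotReady until a CNI config file appears
// in the net.d directory. Illustration only, not kubelet source.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	for _, e := range entries {
		// CNI accepts .conf, .conflist, and (legacy) .json config files.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: no CNI configuration file found")
}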
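The nestedpendingoperations record above shows the per-volume retry backoff: after the MountVolume.SetUp failure, no retry is permitted for 4s (durationBeforeRetry 4s). A toy model of that doubling backoff follows; the 500ms starting delay and the cap are assumptions rather than values read from this log, and with them a fourth consecutive failure lands on the logged 4s:

// mount_backoff.go - a toy model of the backoff behind "No retries
// permitted until ... (durationBeforeRetry 4s)". The initial delay and
// cap below are assumed constants, not taken from kubelet or this log.
package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initialDelay = 500 * time.Millisecond // assumed starting backoff
		maxDelay     = 2 * time.Minute        // assumed upper bound
	)
	delay := initialDelay
	for failure := 1; failure <= 6; failure++ {
		// delay is the wait imposed after the Nth consecutive failure;
		// doubling from 500ms gives 4s at failure 4, matching the log.
		fmt.Printf("failure %d -> durationBeforeRetry %v\n", failure, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}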
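The status_manager records that follow all fail the same way: every pod status patch is rejected because the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate whose validity ended 2025-08-24T17:21:41Z, long before the logged clock time. A hedged diagnostic sketch that dials that endpoint and prints the served certificate's validity window (inspection only; InsecureSkipVerify must never be used for real traffic):

// webhook_cert_probe.go - a diagnostic sketch: read the validity window of
// the certificate served by the webhook endpoint named in the records
// below, to confirm the "x509: certificate has expired" failures.
package main

import (
	"crypto/tls"
	"fmt"
)

func main() {
	// Skip verification so the handshake succeeds despite the expiry;
	// we only want to inspect the presented certificate.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	fmt.Println("subject:  ", certs[0].Subject)
	fmt.Println("notBefore:", certs[0].NotBefore)
	fmt.Println("notAfter: ", certs[0].NotAfter) // log implies 2025-08-24T17:21:41Z
}

Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.796408 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.796917 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797018 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797113 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.797215 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 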
Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.858474 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.881096 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\"
,\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.896876 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.899747 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.900019 
4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.900316 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.900581 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.900750 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:58Z","lastTransitionTime":"2025-12-01T19:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.914351 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.928669 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.951480 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.971437 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:58 crc kubenswrapper[4888]: I1201 19:33:58.986614 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:58Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.003473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.003526 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.003537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.003556 
4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.003568 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.005167 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.020695 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\
"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.039162 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"res
ource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.058098 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.070814 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.087946 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.105861 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.106102 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.106252 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.106392 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.106522 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.116870 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe68166
3d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped 
ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.132095 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.134756 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.134783 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.134792 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.134806 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.134815 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.149581 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z"
Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153015 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153842 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153874 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153885 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153901 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.153912 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.169273 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.172206 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.172236 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.172244 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.172256 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.172347 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.188344 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.192354 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.192445 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.192468 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.192533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.192556 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.211631 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.216325 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.216363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.216377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.216399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.216413 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.229511 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:33:59Z is after 2025-08-24T17:21:41Z" Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.229873 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.231434 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.231467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.231481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.231501 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.231515 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.333299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.333342 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.333356 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.333376 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.333390 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.435352 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.435389 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.435400 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.435416 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.435427 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.450141 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:33:59 crc kubenswrapper[4888]: E1201 19:33:59.450285 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.537516 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.537564 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.537576 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.537593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.537606 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.640362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.640671 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.640768 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.640860 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.640946 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.742991 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.743031 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.743042 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.743059 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.743071 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.844951 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.844995 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.845011 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.845030 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.845044 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.947371 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.947413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.947425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.947446 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:33:59 crc kubenswrapper[4888]: I1201 19:33:59.947458 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:33:59Z","lastTransitionTime":"2025-12-01T19:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.049632 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.049693 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.049702 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.049715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.049723 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.151851 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.151933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.151945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.151961 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.151973 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.254978 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.255030 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.255046 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.255067 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.255082 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.357040 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.357099 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.357110 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.357122 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.357130 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.451418 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.451523 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:00 crc kubenswrapper[4888]: E1201 19:34:00.451587 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:00 crc kubenswrapper[4888]: E1201 19:34:00.451648 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.451731 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:00 crc kubenswrapper[4888]: E1201 19:34:00.451875 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.461254 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.461357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.461375 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.461399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.461416 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.468933 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.483245 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.498621 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.512268 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.526361 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.541874 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.554505 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.563588 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.563633 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.563646 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.563668 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.563680 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.568978 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.586670 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.611681 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe68166
3d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1a5540a829e4f3bda5a066fe4fd1d6a1fa7c53d04880b5a00725e47855cd41f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:49Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:49.843016 6188 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:49.843056 6188 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:33:49.843117 6188 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:49.843207 6188 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:33:49.843285 6188 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 19:33:49.843333 6188 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:33:49.843364 6188 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:33:49.843385 6188 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 19:33:49.843391 6188 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:33:49.843391 6188 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 19:33:49.843411 6188 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:33:49.843429 6188 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:33:49.843436 6188 factory.go:656] Stopping watch factory\\\\nI1201 19:33:49.843435 6188 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:33:49.843449 6188 ovnkube.go:599] Stopped ovnkube\\\\nI12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped 
ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.628879 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.645560 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.664369 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.667434 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.667940 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.667968 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.668000 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.668023 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.680854 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.702777 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.715340 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.770668 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.770766 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.770788 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.770816 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.770836 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.874112 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.874228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.874257 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.874292 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.874314 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.977240 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.977315 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.977338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.977371 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:00 crc kubenswrapper[4888]: I1201 19:34:00.977394 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:00Z","lastTransitionTime":"2025-12-01T19:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.080043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.080117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.080137 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.080168 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.080220 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.183776 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.183836 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.183853 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.183876 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.183894 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.286229 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.286293 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.286310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.286337 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.286356 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.388384 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.388418 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.388427 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.388440 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.388450 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.450342 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:01 crc kubenswrapper[4888]: E1201 19:34:01.450442 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.490266 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.490305 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.490336 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.490351 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.490361 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.592322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.592362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.592373 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.592389 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.592400 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.694926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.694996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.695034 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.695064 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.695084 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.797705 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.797753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.797773 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.797792 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.797807 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.901253 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.901321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.901339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.901363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:01 crc kubenswrapper[4888]: I1201 19:34:01.901380 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:01Z","lastTransitionTime":"2025-12-01T19:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.004489 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.004540 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.004552 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.004568 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.004580 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.041041 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:02 crc kubenswrapper[4888]: E1201 19:34:02.041231 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:34:02 crc kubenswrapper[4888]: E1201 19:34:02.041316 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:10.041299521 +0000 UTC m=+49.912329445 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.106910 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.106970 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.106994 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.107017 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.107032 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.209288 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.209318 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.209327 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.209340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.209348 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.311926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.311984 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.312000 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.312021 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.312036 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.415282 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.415366 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.415377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.415390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.415400 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.450331 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.450411 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.450408 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:02 crc kubenswrapper[4888]: E1201 19:34:02.450552 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:02 crc kubenswrapper[4888]: E1201 19:34:02.450647 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:02 crc kubenswrapper[4888]: E1201 19:34:02.450685 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.517761 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.517821 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.517837 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.517859 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.517876 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.620801 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.620888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.620912 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.620941 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.620966 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.723469 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.723514 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.723528 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.723547 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.723561 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.826208 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.826258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.826273 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.826294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.826308 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.929803 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.929849 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.929862 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.929880 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:02 crc kubenswrapper[4888]: I1201 19:34:02.929893 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:02Z","lastTransitionTime":"2025-12-01T19:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.032662 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.032717 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.032734 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.032759 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.032776 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.136250 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.136294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.136305 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.136322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.136332 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.239091 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.239141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.239154 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.239177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.239215 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.342491 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.342543 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.342560 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.342582 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.342599 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.445621 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.445689 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.445712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.445740 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.445764 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.450947 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:03 crc kubenswrapper[4888]: E1201 19:34:03.451113 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.548889 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.548952 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.548972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.548996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.549013 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.651370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.651432 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.651448 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.651473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.651490 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.754567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.754627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.754636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.754670 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.754683 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.857107 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.857392 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.857451 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.857510 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.857591 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.960985 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.961054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.961076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.961103 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:03 crc kubenswrapper[4888]: I1201 19:34:03.961123 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:03Z","lastTransitionTime":"2025-12-01T19:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.064538 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.065298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.065333 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.065362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.065384 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.168111 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.168150 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.168160 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.168176 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.168212 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.271425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.271472 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.271484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.271501 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.271514 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.375040 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.375099 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.375117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.375141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.375159 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.450869 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:04 crc kubenswrapper[4888]: E1201 19:34:04.451123 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.450915 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.451231 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:04 crc kubenswrapper[4888]: E1201 19:34:04.451388 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:04 crc kubenswrapper[4888]: E1201 19:34:04.451489 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.478137 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.478228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.478248 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.478277 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.478296 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.581606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.581698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.581715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.581738 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.581749 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.683735 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.683766 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.683774 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.683787 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.683798 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.786913 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.786960 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.786972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.786990 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.787002 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.890541 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.890589 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.890601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.890617 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.890629 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.993976 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.994048 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.994069 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.994093 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:04 crc kubenswrapper[4888]: I1201 19:34:04.994112 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:04Z","lastTransitionTime":"2025-12-01T19:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.097423 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.097983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.098080 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.098218 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.098322 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.201723 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.201767 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.201779 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.201821 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.201834 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.304811 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.305504 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.305540 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.305585 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.305614 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.409399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.409495 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.409522 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.409561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.409591 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.450541 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:05 crc kubenswrapper[4888]: E1201 19:34:05.450839 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.452243 4888 scope.go:117] "RemoveContainer" containerID="5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.483086 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514757 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514780 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514805 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514662 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.514825 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.546709 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe68166
3d50937ebae32930a93f2fc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.558224 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.570966 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.587000 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.600724 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.617889 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.618979 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.619057 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.619102 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.619126 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.619140 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.630359 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.644873 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.660538 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.675821 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.691210 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.705198 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.720115 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.721122 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.721166 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.721204 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.721228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.721253 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.722157 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/1.log" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.725586 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.725760 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.736828 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd4
58c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.751199 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.770773 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.784962 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.798645 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.816076 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.823295 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.823356 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.823372 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.823398 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.823414 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.832476 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.846059 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.870668 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.891598 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258
ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.912201 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.926208 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.926275 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.926288 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.926310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.926336 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:05Z","lastTransitionTime":"2025-12-01T19:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.930592 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.943017 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.956489 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.970604 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.979830 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:05 crc kubenswrapper[4888]: I1201 19:34:05.989919 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:05Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.029437 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.029477 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.029493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.029507 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.029516 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.132285 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.132329 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.132338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.132353 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.132362 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.234607 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.234648 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.234661 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.234679 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.234692 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.337687 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.337720 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.337730 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.337746 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.337758 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.440781 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.440814 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.440825 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.440840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.440851 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.450533 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.450533 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:06 crc kubenswrapper[4888]: E1201 19:34:06.450693 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:06 crc kubenswrapper[4888]: E1201 19:34:06.450736 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.451271 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:06 crc kubenswrapper[4888]: E1201 19:34:06.451554 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.542835 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.542883 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.542897 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.542916 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.542928 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.645382 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.645422 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.645455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.645472 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.645485 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.730324 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/2.log" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.730898 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/1.log" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.733126 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6" exitCode=1 Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.733161 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.733213 4888 scope.go:117] "RemoveContainer" containerID="5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.733854 4888 scope.go:117] "RemoveContainer" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6" Dec 01 19:34:06 crc kubenswrapper[4888]: E1201 19:34:06.734026 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.744930 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.747869 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.747892 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.747900 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.747911 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.747920 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.763568 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.780025 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.805832 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.817162 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.828940 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.839413 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.851844 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.851764 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.851882 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.851893 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: 
I1201 19:34:06.851911 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.851926 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.860219 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.869206 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.878167 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 
19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.887573 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.900765 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of 
http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 
19:34:06.918252 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.927688 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.940277 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:06Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.954962 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.954999 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.955010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.955027 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:06 crc kubenswrapper[4888]: I1201 19:34:06.955038 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:06Z","lastTransitionTime":"2025-12-01T19:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.057845 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.057933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.057953 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.057990 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.058011 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.160626 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.160686 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.160703 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.160726 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.160743 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.264078 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.264164 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.264227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.264258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.264282 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.367983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.368098 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.368126 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.368164 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.368238 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.450925 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:07 crc kubenswrapper[4888]: E1201 19:34:07.451284 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.471651 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.471724 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.471741 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.471769 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.471796 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.574843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.574898 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.574996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.575021 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.575036 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.677726 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.677775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.677793 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.677813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.677828 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.737491 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/2.log" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.779782 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.779838 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.779851 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.779868 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.779879 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.883568 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.883619 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.883628 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.883647 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.883659 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.986987 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.987039 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.987047 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.987062 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:07 crc kubenswrapper[4888]: I1201 19:34:07.987076 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:07Z","lastTransitionTime":"2025-12-01T19:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.090593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.090657 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.090669 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.090686 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.090697 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.194009 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.194085 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.194107 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.194138 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.194161 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.298294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.298378 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.298422 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.298462 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.298486 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.401024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.401117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.401139 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.401170 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.401215 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.450495 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.450545 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.450495 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:08 crc kubenswrapper[4888]: E1201 19:34:08.450670 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:08 crc kubenswrapper[4888]: E1201 19:34:08.450771 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:08 crc kubenswrapper[4888]: E1201 19:34:08.450848 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.504000 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.504045 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.504057 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.504076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.504090 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.606337 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.606388 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.606401 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.606420 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.606434 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.709431 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.709477 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.709489 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.709507 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.709522 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.812949 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.813026 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.813049 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.813080 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.813104 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.916986 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.917084 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.917110 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.917148 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:08 crc kubenswrapper[4888]: I1201 19:34:08.917175 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:08Z","lastTransitionTime":"2025-12-01T19:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.021011 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.021130 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.021150 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.021217 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.021242 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.124738 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.124803 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.124823 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.124862 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.124889 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.229240 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.229432 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.229466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.229530 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.229557 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.261558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.261644 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.261667 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.261700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.261721 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.278287 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:09Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.283478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.283523 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.283537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.283559 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.283571 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.305250 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:09Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.310338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.310413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.310428 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.310452 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.310466 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.325943 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:09Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.331499 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.331557 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.331577 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.331604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.331624 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.346058 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:09Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.351545 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.351599 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.351615 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.351638 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.351654 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.370370 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:09Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.370523 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.372539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.372606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.372627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.372655 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.372675 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.450251 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:09 crc kubenswrapper[4888]: E1201 19:34:09.450496 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.477335 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.477865 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.478062 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.478243 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.478403 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.582141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.582247 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.582273 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.582306 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.582331 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.684398 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.684779 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.684927 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.685086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.685269 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.788539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.788597 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.788614 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.788639 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.788656 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.891896 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.891957 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.891974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.891995 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.892012 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.996574 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.996628 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.996645 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.996667 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:09 crc kubenswrapper[4888]: I1201 19:34:09.996681 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:09Z","lastTransitionTime":"2025-12-01T19:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.043845 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.044091 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.044224 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:26.044172421 +0000 UTC m=+65.915202345 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.099499 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.099550 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.099568 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.099587 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.099599 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.202298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.202343 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.202357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.202377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.202388 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.306146 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.306233 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.306249 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.306268 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.306280 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.346462 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.346564 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.346597 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.346717 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.346847 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:34:42.346709371 +0000 UTC m=+82.217739345 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.346879 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.346957 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:42.346877356 +0000 UTC m=+82.217907310 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.347012 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:42.346981069 +0000 UTC m=+82.218011043 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.409529 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.409814 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.410014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.410159 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.410350 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.447654 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.447728 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447892 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447918 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447938 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447948 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447960 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.447964 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.448031 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:42.448015104 +0000 UTC m=+82.319045028 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.448052 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:42.448043665 +0000 UTC m=+82.319073599 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.451226 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.451387 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.451457 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.451778 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.451799 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:10 crc kubenswrapper[4888]: E1201 19:34:10.451961 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.465603 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.489292 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.508407 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.512606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.512660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.512677 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.512700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.512716 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.527371 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.543969 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.556451 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.569275 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.580638 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.592389 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.611450 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.614777 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.614938 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.615037 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.615171 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.615311 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.626034 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.638139 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.653000 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.677947 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720078 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720518 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720682 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720228 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720759 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.720926 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.738360 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.738610 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.748133 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.752067 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.766716 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.785823 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4
512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e7320684f3fd6e38cffa4b022186deeffe681663d50937ebae32930a93f2fc3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:33:51Z\\\",\\\"message\\\":\\\"ry.go:160\\\\nI1201 19:33:51.642055 6318 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:33:51.642138 6318 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642205 6318 factory.go:656] Stopping watch factory\\\\nI1201 19:33:51.642264 6318 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:33:51.642304 6318 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642359 6318 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642410 6318 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.642445 6318 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 19:33:51.675002 6318 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI1201 19:33:51.675108 6318 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI1201 19:33:51.675242 6318 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:33:51.675300 6318 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:33:51.675386 6318 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.797549 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.813381 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.825075 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.825341 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.825357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.825372 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.825382 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.827838 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.839736 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.854951 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.865119 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.880424 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.894275 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.914232 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.927823 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.927859 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.927872 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.927891 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.927906 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:10Z","lastTransitionTime":"2025-12-01T19:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.928756 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\
"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.939537 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.949651 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:10 crc kubenswrapper[4888]: I1201 19:34:10.962722 4888 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:10Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.030336 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.030382 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.030394 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.030412 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.030423 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.134854 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.134895 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.134910 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.134927 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.134939 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.238024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.238261 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.238366 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.238437 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.238496 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.341996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.343629 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.343785 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.343986 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.344148 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.447565 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.447611 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.447626 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.447645 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.447658 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.451007 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:11 crc kubenswrapper[4888]: E1201 19:34:11.451148 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.479738 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.480562 4888 scope.go:117] "RemoveContainer" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6" Dec 01 19:34:11 crc kubenswrapper[4888]: E1201 19:34:11.480716 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.503290 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.519535 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.539811 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.550923 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.550967 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.550980 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.551001 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.551015 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.564538 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258
ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.578879 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.591544 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.605572 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.620051 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.639677 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.652433 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.653645 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.653684 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.653695 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.653709 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.653719 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.667635 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.684042 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.698848 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.715054 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.726093 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.737786 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.747322 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:11Z is after 2025-08-24T17:21:41Z"
Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.755071 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.755095 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.755103 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.755116 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:11 crc kubenswrapper[4888]: I1201 19:34:11.755125 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:11Z","lastTransitionTime":"2025-12-01T19:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the five-entry node-status cycle above recurs, identical except for timestamps, at 19:34:11.857, 19:34:11.960, 19:34:12.064, 19:34:12.166, 19:34:12.269, and 19:34:12.371]
Dec 01 19:34:12 crc kubenswrapper[4888]: I1201 19:34:12.450836 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:12 crc kubenswrapper[4888]: I1201 19:34:12.450882 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:12 crc kubenswrapper[4888]: E1201 19:34:12.451096 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:12 crc kubenswrapper[4888]: I1201 19:34:12.450877 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:12 crc kubenswrapper[4888]: E1201 19:34:12.451259 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:12 crc kubenswrapper[4888]: E1201 19:34:12.451342 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the node-status cycle recurs at 19:34:12.474, 19:34:12.577, 19:34:12.679, 19:34:12.781, 19:34:12.884, 19:34:12.986, 19:34:13.089, 19:34:13.193, 19:34:13.296, and 19:34:13.399]
Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.450520 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:13 crc kubenswrapper[4888]: E1201 19:34:13.450715 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
[the node-status cycle recurs at 19:34:13.502, 19:34:13.605, 19:34:13.709, 19:34:13.811, 19:34:13.914, 19:34:14.017, 19:34:14.120, 19:34:14.224, 19:34:14.328, and 19:34:14.432]
Has your network provider started?"} Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.709223 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.709295 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.709313 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.709342 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.709361 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:13Z","lastTransitionTime":"2025-12-01T19:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.811996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.812060 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.812077 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.812101 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.812118 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:13Z","lastTransitionTime":"2025-12-01T19:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.914734 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.914869 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.914945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.914974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:13 crc kubenswrapper[4888]: I1201 19:34:13.915031 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:13Z","lastTransitionTime":"2025-12-01T19:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.017522 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.017603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.017620 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.017644 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.017662 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.120957 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.121029 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.121051 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.121080 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.121101 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.224906 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.224965 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.224982 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.225005 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.225022 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.328831 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.328939 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.328990 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.329014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.329035 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.432055 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.432107 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.432120 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.432140 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.432152 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.450740 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.450801 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:14 crc kubenswrapper[4888]: E1201 19:34:14.450895 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:14 crc kubenswrapper[4888]: E1201 19:34:14.451269 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.451368 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:14 crc kubenswrapper[4888]: E1201 19:34:14.451456 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.535452 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.535513 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.535529 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.535552 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:14 crc kubenswrapper[4888]: I1201 19:34:14.535569 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:14Z","lastTransitionTime":"2025-12-01T19:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 19:34:15 crc kubenswrapper[4888]: I1201 19:34:15.450841 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:15 crc kubenswrapper[4888]: E1201 19:34:15.450974 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.696043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.696097 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.696108 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.696123 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.696156 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.727942 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.727984 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.727997 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.728017 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.728033 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.749962 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:19Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.754702 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.754761 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.754780 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.754808 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.754826 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.777162 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:19Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.783410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.783479 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.783496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.783524 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.783540 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.799412 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:19Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.803765 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.803796 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.803815 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.803840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.803856 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.817655 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:19Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.822065 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.822110 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.822121 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.822141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.822157 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.838371 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:19Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:19 crc kubenswrapper[4888]: E1201 19:34:19.838538 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.840518 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.840605 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.840627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.840651 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.840669 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.943623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.943670 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.943683 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.943703 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:19 crc kubenswrapper[4888]: I1201 19:34:19.943714 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:19Z","lastTransitionTime":"2025-12-01T19:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.047016 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.047127 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.047160 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.047224 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.047247 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.150491 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.150582 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.150596 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.150641 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.150664 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.255455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.255539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.255580 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.255621 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.255651 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.359536 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.359608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.359620 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.359637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.359650 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.450373 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.450424 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.450438 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:20 crc kubenswrapper[4888]: E1201 19:34:20.450630 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:20 crc kubenswrapper[4888]: E1201 19:34:20.451075 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:20 crc kubenswrapper[4888]: E1201 19:34:20.451490 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.463848 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.463933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.463961 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.463997 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.464036 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.476867 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.493269 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.508524 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.521921 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.542340 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.562973 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.566946 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.566979 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.566989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.567006 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.567036 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.582350 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.594873 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.622425 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258
ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.634753 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.646301 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440
c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.658987 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.668858 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.668895 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.668903 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.668915 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.668926 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.673808 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.686508 4888 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.703085 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.718989 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.732974 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:20Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.770843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.770894 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.770907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.770927 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.770941 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.872868 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.872904 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.872913 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.872926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.872935 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.975822 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.975873 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.975885 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.975903 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:20 crc kubenswrapper[4888]: I1201 19:34:20.975919 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:20Z","lastTransitionTime":"2025-12-01T19:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.078742 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.078885 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.078904 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.078921 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.078931 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.181459 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.181493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.181501 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.181515 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.181523 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.284542 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.284636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.284654 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.284682 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.284702 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.387240 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.387278 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.387289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.387305 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.387316 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.451074 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:21 crc kubenswrapper[4888]: E1201 19:34:21.451236 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.489849 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.489914 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.489939 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.489969 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.489997 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.595480 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.595561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.595591 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.595629 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.595659 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.702340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.702395 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.702412 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.702433 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.702456 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.806312 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.806595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.806607 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.806623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.806633 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.908617 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.908651 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.908662 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.908676 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:21 crc kubenswrapper[4888]: I1201 19:34:21.908684 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:21Z","lastTransitionTime":"2025-12-01T19:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.011065 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.011105 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.011121 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.011137 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.011147 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.113391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.113450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.113466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.113484 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.113494 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.215363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.215391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.215399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.215411 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.215420 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.318415 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.318454 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.318465 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.318481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.318493 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.420602 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.420629 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.420637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.420649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.420659 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.452983 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.453079 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.453210 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:22 crc kubenswrapper[4888]: E1201 19:34:22.453177 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:22 crc kubenswrapper[4888]: E1201 19:34:22.453297 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:22 crc kubenswrapper[4888]: E1201 19:34:22.453357 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.523233 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.523258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.523267 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.523279 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.523288 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.625322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.625351 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.625369 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.625385 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.625397 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.729096 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.729396 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.729478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.729517 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.729587 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.831961 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.831998 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.832011 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.832030 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.832043 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.934654 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.934688 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.934698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.934712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:22 crc kubenswrapper[4888]: I1201 19:34:22.934723 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:22Z","lastTransitionTime":"2025-12-01T19:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.037491 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.037527 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.037538 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.037555 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.037566 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.141099 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.141177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.141243 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.141271 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.141288 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.244370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.244428 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.244441 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.244464 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.244477 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.347632 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.347706 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.347733 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.347770 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.347792 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.450539 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:23 crc kubenswrapper[4888]: E1201 19:34:23.450693 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451144 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451175 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451208 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451224 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451238 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.451543 4888 scope.go:117] "RemoveContainer" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6"
Dec 01 19:34:23 crc kubenswrapper[4888]: E1201 19:34:23.451747 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.554451 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.554510 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.554520 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.554544 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.554559 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.657776 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.657845 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.657860 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.657878 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.657893 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.761174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.761232 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.761245 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.761262 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.761275 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.864023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.864100 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.864123 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.864151 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.864170 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.966988 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.967032 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.967041 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.967059 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:23 crc kubenswrapper[4888]: I1201 19:34:23.967071 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:23Z","lastTransitionTime":"2025-12-01T19:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.070971 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.071035 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.071052 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.071081 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.071099 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.174554 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.174602 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.174617 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.174639 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.174656 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.278628 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.278682 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.278694 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.278719 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.278733 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.381512 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.381635 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.381670 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.381711 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.381738 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.451545 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.451637 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.451557 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:24 crc kubenswrapper[4888]: E1201 19:34:24.451755 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:24 crc kubenswrapper[4888]: E1201 19:34:24.451908 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:24 crc kubenswrapper[4888]: E1201 19:34:24.452156 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.484241 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.484299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.484309 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.484326 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.484337 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.588473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.588543 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.588559 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.588587 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.588603 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.691719 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.691817 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.691837 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.691867 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.691887 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.794002 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.794038 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.794047 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.794059 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.794069 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.897049 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.897124 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.897143 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.897174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.897233 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.999639 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.999768 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.999786 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.999810 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:24 crc kubenswrapper[4888]: I1201 19:34:24.999829 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:24Z","lastTransitionTime":"2025-12-01T19:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.101976 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.102030 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.102049 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.102072 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.102088 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.204870 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.204937 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.204970 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.204999 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.205019 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.307771 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.307804 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.307812 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.307850 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.307860 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.410159 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.410276 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.410308 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.410338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.410358 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.451278 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:25 crc kubenswrapper[4888]: E1201 19:34:25.451515 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.513833 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.513883 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.513892 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.513909 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.513919 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.616613 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.616649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.616657 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.616673 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.616682 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.719264 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.719311 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.719320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.719334 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.719345 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.821888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.821954 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.821971 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.821997 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.822014 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.924840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.924902 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.924919 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.924948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:25 crc kubenswrapper[4888]: I1201 19:34:25.924967 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:25Z","lastTransitionTime":"2025-12-01T19:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.027507 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.027548 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.027562 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.027579 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.027591 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.104417 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:26 crc kubenswrapper[4888]: E1201 19:34:26.104624 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 19:34:26 crc kubenswrapper[4888]: E1201 19:34:26.104754 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:34:58.104726977 +0000 UTC m=+97.975756921 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.130854 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.130890 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.130900 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.130915 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.130924 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.234227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.234302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.234320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.234346 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.234363 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.337073 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.337119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.337130 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.337145 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.337156 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.439561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.439597 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.439606 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.439622 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.439631 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.450897 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.450922 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:26 crc kubenswrapper[4888]: E1201 19:34:26.451055 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.451071 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:26 crc kubenswrapper[4888]: E1201 19:34:26.451128 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:26 crc kubenswrapper[4888]: E1201 19:34:26.451219 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.542926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.542955 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.542963 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.542976 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.542988 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.645572 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.645631 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.645652 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.645680 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.645700 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.748602 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.748637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.748646 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.748663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.748674 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.850461 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.850547 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.850566 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.850634 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.850655 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.952802 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.952861 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.952875 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.952893 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:26 crc kubenswrapper[4888]: I1201 19:34:26.952903 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:26Z","lastTransitionTime":"2025-12-01T19:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.055115 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.055151 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.055163 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.055179 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.055208 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.161207 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.161357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.161392 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.161444 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.161479 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.266384 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.266431 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.266447 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.266468 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.266484 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.368672 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.368709 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.368719 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.368734 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.368745 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.451102 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:27 crc kubenswrapper[4888]: E1201 19:34:27.451225 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.471595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.471651 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.471662 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.471679 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.471692 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.574019 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.574057 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.574067 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.574079 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.574090 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.676642 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.676706 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.676724 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.676750 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.676768 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.779070 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.779129 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.779140 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.779153 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.779164 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.806163 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/0.log" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.806285 4888 generic.go:334] "Generic (PLEG): container finished" podID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" containerID="4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a" exitCode=1 Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.806334 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerDied","Data":"4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.807117 4888 scope.go:117] "RemoveContainer" containerID="4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.825521 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.842666 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.856587 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:27Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.880921 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.881197 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.881329 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.881481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.881598 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.887033 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258
ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.901837 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.919866 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.933281 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.947330 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.961238 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.970330 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.980647 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.983699 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.983816 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.983891 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.983980 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.984068 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:27Z","lastTransitionTime":"2025-12-01T19:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:27 crc kubenswrapper[4888]: I1201 19:34:27.992120 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:27Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.006041 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.017068 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.026359 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.037197 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.048497 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.085775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.085921 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.086024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.086126 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.086233 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.188560 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.188592 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.188602 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.188617 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.188629 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.290413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.290442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.290450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.290463 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.290471 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.393133 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.393383 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.393452 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.393523 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.393593 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.451021 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:28 crc kubenswrapper[4888]: E1201 19:34:28.451146 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.451043 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.451021 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:28 crc kubenswrapper[4888]: E1201 19:34:28.451259 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:28 crc kubenswrapper[4888]: E1201 19:34:28.451322 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.495836 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.495879 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.495887 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.495901 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.495912 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.598700 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.598744 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.598761 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.598783 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.598800 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.701158 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.701242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.701260 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.701287 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.701304 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.803561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.803618 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.803635 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.803659 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.803678 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.811722 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/0.log" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.811817 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerStarted","Data":"a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15"} Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.823948 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.839435 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.850043 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.861021 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.876514 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.886378 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.897924 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907605 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907823 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907857 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907867 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907881 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.907892 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:28Z","lastTransitionTime":"2025-12-01T19:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.917887 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.931441 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.949886 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.961634 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.979504 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.989686 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:28 crc kubenswrapper[4888]: I1201 19:34:28.999331 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440
c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:28Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.009860 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:29Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.011567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.011602 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.011616 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.011636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.011648 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.020952 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:29Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.113588 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.113634 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.113649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.113665 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.113677 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.215675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.215709 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.215718 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.215730 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.215737 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.317289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.317315 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.317322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.317332 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.317341 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.419569 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.419621 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.419637 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.419653 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.419663 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.450560 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:29 crc kubenswrapper[4888]: E1201 19:34:29.450690 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.521340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.521381 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.521391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.521406 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.521419 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.623595 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.623632 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.623639 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.623654 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.623664 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.725281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.725320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.725331 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.725346 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.725367 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.827281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.827312 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.827320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.827333 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.827345 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.929972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.930024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.930034 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.930051 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:29 crc kubenswrapper[4888]: I1201 19:34:29.930064 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:29Z","lastTransitionTime":"2025-12-01T19:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.032250 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.032280 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.032289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.032302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.032311 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.135078 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.135158 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.135177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.135241 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.135260 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.200727 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.200788 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.200799 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.200817 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.200828 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.214117 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.217234 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.217295 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.217306 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.217326 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.217341 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.230253 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.234178 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.234258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.234272 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.234294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.234313 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.247278 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.250864 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.250907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.250922 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.250942 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.250958 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.262386 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.266142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.266198 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.266207 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.266228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.266238 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.279421 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.279589 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.281544 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.281569 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.281584 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.281603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.281616 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.384381 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.384421 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.384430 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.384444 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.384453 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.451346 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.451405 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.451449 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.451506 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.451642 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:30 crc kubenswrapper[4888]: E1201 19:34:30.451706 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.463634 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.474893 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.486259 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.486290 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.486301 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.486316 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.486326 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.494493 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.503836 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.514555 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.524513 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.536079 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.554256 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.565004 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.575744 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.585933 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.588604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.588630 4888 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.588638 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.588661 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.588670 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.597696 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.605757 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.615108 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.624371 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.634597 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.647048 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:30Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.691972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.692257 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.692355 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.692444 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.692522 4888 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.795227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.795255 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.795264 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.795277 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.795287 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.898024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.898880 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.899025 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.899122 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:30 crc kubenswrapper[4888]: I1201 19:34:30.899227 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:30Z","lastTransitionTime":"2025-12-01T19:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.001493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.001533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.001545 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.001573 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.001585 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.103660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.103701 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.103709 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.103723 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.103731 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.206010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.206054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.206084 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.206103 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.206114 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.307965 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.308222 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.308326 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.308412 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.308476 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.411112 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.411157 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.411167 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.411204 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.411215 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.450757 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:31 crc kubenswrapper[4888]: E1201 19:34:31.450902 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.513065 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.513114 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.513132 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.513151 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.513163 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.615820 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.615864 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.615873 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.615888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.615898 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.717899 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.717934 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.717945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.717963 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.717974 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.819757 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.820075 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.820219 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.820308 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.820376 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.922678 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.922715 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.922729 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.922743 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:31 crc kubenswrapper[4888]: I1201 19:34:31.922752 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:31Z","lastTransitionTime":"2025-12-01T19:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.024773 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.025048 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.025109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.025171 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.025271 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.127803 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.127839 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.127847 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.127860 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.127870 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.229718 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.229761 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.229775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.229790 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.229803 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.332577 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.332885 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.332979 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.333256 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.333333 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.435604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.435834 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.435929 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.435995 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.436049 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.451033 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.451083 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.451033 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:32 crc kubenswrapper[4888]: E1201 19:34:32.451221 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:32 crc kubenswrapper[4888]: E1201 19:34:32.451262 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:32 crc kubenswrapper[4888]: E1201 19:34:32.451310 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.538565 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.538775 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.538893 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.538976 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.539063 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.641152 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.641242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.641251 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.641268 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.641277 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.743124 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.743167 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.743179 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.743215 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.743229 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.845493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.845747 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.845834 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.845902 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.845962 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.948316 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.948875 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.948948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.949010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:32 crc kubenswrapper[4888]: I1201 19:34:32.949064 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:32Z","lastTransitionTime":"2025-12-01T19:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.051472 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.051508 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.051516 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.051554 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.051563 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:33Z","lastTransitionTime":"2025-12-01T19:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.154096 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.154151 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.154160 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.154174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.154201 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:33Z","lastTransitionTime":"2025-12-01T19:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.256944 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.257258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.257379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.257483 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.257563 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:33Z","lastTransitionTime":"2025-12-01T19:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 19:34:33 crc kubenswrapper[4888]: I1201 19:34:33.451153 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:33 crc kubenswrapper[4888]: E1201 19:34:33.451540 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:34 crc kubenswrapper[4888]: I1201 19:34:34.450864 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:34 crc kubenswrapper[4888]: I1201 19:34:34.450863 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:34 crc kubenswrapper[4888]: E1201 19:34:34.451100 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:34 crc kubenswrapper[4888]: I1201 19:34:34.450889 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:34 crc kubenswrapper[4888]: E1201 19:34:34.451247 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:34 crc kubenswrapper[4888]: E1201 19:34:34.451385 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.450831 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:35 crc kubenswrapper[4888]: E1201 19:34:35.451452 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.451852 4888 scope.go:117] "RemoveContainer" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.841254 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/2.log"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.844274 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"}
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.844815 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.851346 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.851410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.851436 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.851466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.851496 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:35Z","lastTransitionTime":"2025-12-01T19:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.862125 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.883360 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.904863 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.917678 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.937543 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.954001 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.954038 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.954047 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.954062 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.954072 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:35Z","lastTransitionTime":"2025-12-01T19:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.957211 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:35 crc kubenswrapper[4888]: I1201 19:34:35.983579 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.001462 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:35Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.015307 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.029902 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.040465 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.053657 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.056824 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.056879 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.056892 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.056911 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.056923 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.068339 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.080704 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.101324 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.111128 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.121637 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.159776 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.159813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.159820 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.159836 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.159846 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.261787 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.261819 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.261826 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.261839 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.261849 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.364148 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.364226 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.364241 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.364261 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.364274 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.451276 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.451706 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:36 crc kubenswrapper[4888]: E1201 19:34:36.451809 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.451895 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:36 crc kubenswrapper[4888]: E1201 19:34:36.455885 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:36 crc kubenswrapper[4888]: E1201 19:34:36.456050 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.466342 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.466379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.466393 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.466409 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.466423 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.569723 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.569784 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.569795 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.569813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.569828 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.672299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.672336 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.672350 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.672370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.672380 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.775574 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.775630 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.775643 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.775665 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.775682 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.849585 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/3.log" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.850280 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/2.log" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.853956 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" exitCode=1 Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.854039 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.854340 4888 scope.go:117] "RemoveContainer" containerID="a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.856729 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:34:36 crc kubenswrapper[4888]: E1201 19:34:36.857028 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.879926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.879979 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.879989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.880013 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.880025 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.880438 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.902848 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6678845efde2c373b298834acd3242dd7f18258ccea152af1b81ec76af948c6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:06Z\\\",\\\"message\\\":\\\"8 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1201 19:34:06.314007 6538 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 19:34:06.314076 6538 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 19:34:06.314082 6538 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 19:34:06.314066 6538 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1201 19:34:06.314096 6538 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 19:34:06.314106 6538 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 19:34:06.314110 6538 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 19:34:06.314130 6538 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 19:34:06.314148 6538 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 19:34:06.314207 6538 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1201 19:34:06.314220 6538 factory.go:656] Stopping watch factory\\\\nI1201 19:34:06.314240 6538 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:06.314283 6538 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 19:34:06.314295 6538 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:06.314479 6538 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:36Z\\\",\\\"message\\\":\\\"ce\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 19:34:36.362532 6905 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:36.362557 6905 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:36.362617 6905 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"}
,{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.914906 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.928173 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.943880 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.962650 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.978507 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.983351 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.983442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.983458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.983478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.983495 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:36Z","lastTransitionTime":"2025-12-01T19:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:36 crc kubenswrapper[4888]: I1201 19:34:36.993973 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:36Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.012997 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.030714 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.049886 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.065285 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.082419 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.086594 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.086651 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.086660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.086677 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.086688 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.096702 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.117032 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee12
20d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.141098 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.159708 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.189278 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.189369 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.189405 4888 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.189445 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.189473 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.291542 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.291610 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.291623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.291639 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.291650 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.394495 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.394529 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.394539 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.394558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.394568 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.450344 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:37 crc kubenswrapper[4888]: E1201 19:34:37.450495 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.497386 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.497426 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.497438 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.497457 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.497472 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.599905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.599988 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.600014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.600046 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.600065 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.702475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.702525 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.702541 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.702561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.702573 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.805590 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.805642 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.805658 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.805689 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.805707 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.860910 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/3.log" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.871458 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:34:37 crc kubenswrapper[4888]: E1201 19:34:37.871998 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.890301 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.909142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.909230 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.909256 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.909284 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.909303 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:37Z","lastTransitionTime":"2025-12-01T19:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.913701 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.934499 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.948777 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.966389 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.981513 4888 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:37 crc kubenswrapper[4888]: I1201 19:34:37.997452 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:37Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.012039 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.012084 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.012099 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.012119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.012134 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.014550 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.029348 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.059859 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:36Z\\\",\\\"message\\\":\\\"ce\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 19:34:36.362532 6905 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:36.362557 6905 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:36.362617 6905 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.072071 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.090824 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.105469 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.114246 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.114285 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.114296 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.114311 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.114325 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.125746 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.144081 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.158480 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.173424 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:38Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.217174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.217270 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.217289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.217313 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.217331 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.320376 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.320424 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.320439 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.320457 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.320472 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.423031 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.423078 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.423091 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.423109 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.423122 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.450403 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.450439 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.450403 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:38 crc kubenswrapper[4888]: E1201 19:34:38.450513 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:38 crc kubenswrapper[4888]: E1201 19:34:38.450629 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:38 crc kubenswrapper[4888]: E1201 19:34:38.450673 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.467344 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.525312 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.525360 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.525371 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.525391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.525404 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.627432 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.627475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.627485 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.627500 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.627509 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.733036 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.733097 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.733108 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.733125 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.733135 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.835725 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.835767 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.835778 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.835794 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.835805 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.938111 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.938158 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.938170 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.938210 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:38 crc kubenswrapper[4888]: I1201 19:34:38.938223 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:38Z","lastTransitionTime":"2025-12-01T19:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.041149 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.041248 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.041272 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.041299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.041320 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.144121 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.144165 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.144176 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.144216 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.144230 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.247347 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.247413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.247430 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.247466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.247485 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.349543 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.349596 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.349612 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.349634 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.349648 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.453328 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.453377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.453393 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.453414 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.453430 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.454239 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:39 crc kubenswrapper[4888]: E1201 19:34:39.454443 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.556608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.556649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.556660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.556683 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.556698 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.659872 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.659910 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.659918 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.659933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.659944 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.762357 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.762408 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.762417 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.762433 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.762442 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.864971 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.865023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.865038 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.865059 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.865078 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.968278 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.968328 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.968340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.968358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:39 crc kubenswrapper[4888]: I1201 19:34:39.968370 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:39Z","lastTransitionTime":"2025-12-01T19:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.072243 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.072340 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.072361 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.072389 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.072408 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.175366 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.175434 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.175452 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.175476 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.175495 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.278014 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.278072 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.278089 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.278116 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.278136 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.381584 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.381625 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.381636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.381652 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.381664 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.450685 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.451014 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.451472 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.451489 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.451695 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.451784 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.469412 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:36Z\\\",\\\"message\\\":\\\"ce\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 19:34:36.362532 6905 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:36.362557 6905 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:36.362617 6905 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.479773 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.483912 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.483946 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.483955 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.483970 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.483981 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.493447 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.526890 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c7dac82-816f-44a8-ba29-1c9aab3b302e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6229a03c2ac283713c2bbdb42ae3486a742f13a0d8f36221c926b3a1f6839b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bab3cb8d2dec74792ffcf0d50622194a297e1bec87ebbf8d2ad4ff3ad2b460a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://029c98a5fa0e3afc0bba50293a88bb7978a002bff6c87cdb9cf86c0165a95e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3f8a6d3dfbe9fe4c6304608f75951ba449946
3f344d4ce2d76e00ebc544082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a082e7eb03d0a6e1a89af241ad1bdec5406448a5a5c008014b14b589d6e0b381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.564694 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.582357 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.586212 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.586265 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.586279 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.586301 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.586318 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.595231 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.596036 4888 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.596070 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.596080 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.596094 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.596105 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.612126 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.613947 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.615800 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.615825 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.615833 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.615847 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.615857 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.625567 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.629116 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c
809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.632627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.632677 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.632692 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.632716 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.632736 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.639933 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.646372 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.650844 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.650875 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.650884 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.650903 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.650914 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.657490 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959
480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z"
Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.664326 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.668228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.668303 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.668318 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.668337 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.668379 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.672619 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.680880 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: E1201 19:34:40.680996 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.688916 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.688922 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.688999 4888 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.689021 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.689055 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.689074 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.703685 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.718779 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.734319 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.750226 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.762798 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:40Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.791641 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.791685 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.791695 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.791710 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.791725 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.895362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.895424 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.895443 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.895473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.895491 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.999007 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.999094 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.999121 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.999157 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:40 crc kubenswrapper[4888]: I1201 19:34:40.999178 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:40Z","lastTransitionTime":"2025-12-01T19:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.103429 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.103521 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.103551 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.103589 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.103617 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.207873 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.207951 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.207972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.208005 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.208030 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.311526 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.311604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.311627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.311658 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.311676 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.417325 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.417469 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.417497 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.417557 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.417591 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.451145 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:41 crc kubenswrapper[4888]: E1201 19:34:41.451391 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.521390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.521458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.521475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.521502 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.521518 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.624323 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.624397 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.624421 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.624448 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.624468 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.727695 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.727782 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.727801 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.727827 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.727845 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.831036 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.831076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.831085 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.831098 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.831107 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.934066 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.934111 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.934123 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.934141 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:41 crc kubenswrapper[4888]: I1201 19:34:41.934154 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:41Z","lastTransitionTime":"2025-12-01T19:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.037140 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.037260 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.037286 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.037319 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.037342 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:42Z","lastTransitionTime":"2025-12-01T19:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.139770 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.139844 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.139856 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.139874 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.139890 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:42Z","lastTransitionTime":"2025-12-01T19:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.243464 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.243546 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.243570 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.243605 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.243628 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:42Z","lastTransitionTime":"2025-12-01T19:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.347440 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.347936 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.348142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.348331 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.348485 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:42Z","lastTransitionTime":"2025-12-01T19:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.377963 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.378055 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.378111 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.378241 4888 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.378294 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:46.378279532 +0000 UTC m=+146.249309446 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.378464 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:46.378442267 +0000 UTC m=+146.249472181 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.378494 4888 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.378517 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:46.378510169 +0000 UTC m=+146.249540083 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.450353 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.450411 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.450460 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.450536 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.450664 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.450742 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.479625 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:42 crc kubenswrapper[4888]: I1201 19:34:42.479926 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.479807 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.480410 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.480577 4888 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.480798 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:46.480771334 +0000 UTC m=+146.351801288 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.480014 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.481096 4888 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.481264 4888 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:34:42 crc kubenswrapper[4888]: E1201 19:34:42.481453 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:46.481431172 +0000 UTC m=+146.352461126 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 01 19:34:43 crc kubenswrapper[4888]: I1201 19:34:43.450801 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:43 crc kubenswrapper[4888]: E1201 19:34:43.450981 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:34:44 crc kubenswrapper[4888]: I1201 19:34:44.451022 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:44 crc kubenswrapper[4888]: I1201 19:34:44.451091 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:44 crc kubenswrapper[4888]: E1201 19:34:44.451179 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:44 crc kubenswrapper[4888]: E1201 19:34:44.451367 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:44 crc kubenswrapper[4888]: I1201 19:34:44.451483 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:44 crc kubenswrapper[4888]: E1201 19:34:44.451645 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:34:45 crc kubenswrapper[4888]: I1201 19:34:45.450407 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:45 crc kubenswrapper[4888]: E1201 19:34:45.450750 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.281638 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.281743 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.281800 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.281828 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.281845 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.386622 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.386663 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.386678 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.386698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.386714 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.451089 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.451089 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.451089 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:46 crc kubenswrapper[4888]: E1201 19:34:46.451499 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:46 crc kubenswrapper[4888]: E1201 19:34:46.451597 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:46 crc kubenswrapper[4888]: E1201 19:34:46.451741 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.490165 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.490257 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.490278 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.490302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.490323 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.593708 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.593754 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.593768 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.593789 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.593801 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.696479 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.696546 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.696558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.696578 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.696592 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.799633 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.799723 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.799753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.799788 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.799816 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.903287 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.903402 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.903428 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.903466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:46 crc kubenswrapper[4888]: I1201 19:34:46.903492 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:46Z","lastTransitionTime":"2025-12-01T19:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.007789 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.007857 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.007876 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.007905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.007925 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.111640 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.111795 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.111820 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.111851 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.111871 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.215914 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.216025 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.216047 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.216084 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.216110 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.321289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.321377 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.321402 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.321436 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.321474 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.424698 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.424774 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.424798 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.424834 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.424855 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.451134 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:47 crc kubenswrapper[4888]: E1201 19:34:47.451322 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.529088 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.529181 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.529499 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.529533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.529554 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.633848 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.633933 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.633952 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.633980 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.634006 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.736847 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.736911 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.736923 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.736945 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.736961 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.841133 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.841225 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.841238 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.841260 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.841275 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.945259 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.945323 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.945346 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.945373 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:47 crc kubenswrapper[4888]: I1201 19:34:47.945393 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:47Z","lastTransitionTime":"2025-12-01T19:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.048533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.048612 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.048638 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.048670 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.048706 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.152368 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.152448 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.152472 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.152507 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.152530 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.254902 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.254954 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.254966 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.254984 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.254994 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.358974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.359067 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.359092 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.359132 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.359157 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.451251 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.451438 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:48 crc kubenswrapper[4888]: E1201 19:34:48.451562 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.451258 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:48 crc kubenswrapper[4888]: E1201 19:34:48.451734 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:48 crc kubenswrapper[4888]: E1201 19:34:48.451778 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.461835 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.461905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.461925 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.461956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.461975 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.564978 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.565066 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.565093 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.565129 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.565172 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.669046 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.669107 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.669125 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.669150 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.669169 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.772961 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.773037 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.773056 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.773088 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.773109 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.877121 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.877177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.877215 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.877236 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.877248 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.983719 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.983773 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.983784 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.983803 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:48 crc kubenswrapper[4888]: I1201 19:34:48.983814 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:48Z","lastTransitionTime":"2025-12-01T19:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.086847 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.086906 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.086925 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.086952 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.086969 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.191382 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.191463 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.191482 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.191511 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.191536 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.295515 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.295587 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.295611 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.295646 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.295674 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.399593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.399644 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.399654 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.399671 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.399683 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.450960 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:49 crc kubenswrapper[4888]: E1201 19:34:49.451379 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.503238 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.503298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.503318 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.503349 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.503375 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.606679 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.606736 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.606753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.606778 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.606795 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.709973 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.710052 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.710067 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.710091 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.710108 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.814394 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.814442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.814455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.814474 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.814485 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.918086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.918144 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.918157 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.918196 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:49 crc kubenswrapper[4888]: I1201 19:34:49.918214 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:49Z","lastTransitionTime":"2025-12-01T19:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.022402 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.022479 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.022499 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.022533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.022555 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.126530 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.126608 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.126636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.126673 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.126701 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.231323 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.231380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.231391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.231411 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.231487 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.335086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.335139 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.335159 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.335221 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.335245 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.438514 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.438604 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.438623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.438656 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.438675 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.451021 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.451100 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:50 crc kubenswrapper[4888]: E1201 19:34:50.451336 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.451430 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:50 crc kubenswrapper[4888]: E1201 19:34:50.451699 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:50 crc kubenswrapper[4888]: E1201 19:34:50.451820 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.471562 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.492589 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.513340 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.532669 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.542548 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.542632 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.542668 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.542695 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.542709 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.558070 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.580072 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.600373 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.625805 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:36Z\\\",\\\"message\\\":\\\"ce\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 19:34:36.362532 6905 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:36.362557 6905 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:36.362617 6905 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.640804 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.651605 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.651660 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.651675 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.651697 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.651712 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.657786 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.691219 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c7dac82-816f-44a8-ba29-1c9aab3b302e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6229a03c2ac283713c2bbdb42ae3486a742f13a0d8f36221c926b3a1f6839b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bab
3cb8d2dec74792ffcf0d50622194a297e1bec87ebbf8d2ad4ff3ad2b460a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://029c98a5fa0e3afc0bba50293a88bb7978a002bff6c87cdb9cf86c0165a95e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3f8a6d3dfbe9fe4c6304608f75951ba4499463f344d4ce2d76e00ebc544082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a082e7eb03d0a6e1a89af241ad1bdec5406448a5a5c008014b14b589d6e0b381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.713952 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.737014 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.753319 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.755294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.755337 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.755356 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.755382 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.755400 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.771830 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.790135 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.804766 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.820446 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:50Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.859766 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.859821 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.859840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.859871 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.859893 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.962551 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.962620 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.962640 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.962670 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:50 crc kubenswrapper[4888]: I1201 19:34:50.962692 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:50Z","lastTransitionTime":"2025-12-01T19:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.014591 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.014714 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.014737 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.014770 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.014794 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.038480 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.044492 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.044589 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.044615 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.044678 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.044701 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.069498 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.075624 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.075685 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.075708 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.075741 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.075762 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.098545 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.105364 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.105434 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.105506 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.105542 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.105568 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.127548 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.132601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.132662 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.132688 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.132731 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.132754 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.152828 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:34:51Z is after 2025-08-24T17:21:41Z" Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.153050 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.155279 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
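Every retry of the status patch above fails on the same TLS error: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 serves a certificate whose notAfter of 2025-08-24T17:21:41Z is months behind the node clock (2025-12-01T19:34:51Z), so the kubelet exhausts its fixed retry budget (upstream's nodeStatusUpdateRetry constant) and logs "update node status exceeds retry count". The following minimal Go sketch confirms the expiry from the node; it is a hypothetical diagnostic, not kubelet code, and InsecureSkipVerify is set only so the handshake completes and the served certificate can be read.

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the webhook error logged above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // allow the handshake even though the cert is expired
	})
	if err != nil {
		log.Fatalf("handshake failed: %v", err)
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		log.Fatal("no peer certificate presented")
	}
	c := certs[0]
	fmt.Printf("subject:   %s\n", c.Subject)
	fmt.Printf("notBefore: %s\n", c.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", c.NotAfter.Format(time.RFC3339))
	// Should print true here, matching the x509 "certificate has expired" error.
	fmt.Printf("expired:   %v\n", time.Now().After(c.NotAfter))
}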
event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.155348 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.155376 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.155405 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.155423 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.258648 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.258792 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.258815 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.259538 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.259584 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.363083 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.363205 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.363229 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.363252 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.363268 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.450523 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:51 crc kubenswrapper[4888]: E1201 19:34:51.451323 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.468227 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.470478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.470544 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.470567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.470599 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.470625 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.573689 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.573751 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.573762 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.573781 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.573792 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
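Alongside the webhook failure, every Ready condition and pod sync error in this window reports the same runtime-side cause: no CNI configuration file in /etc/kubernetes/cni/net.d/, so NetworkReady stays false and sandbox creation is skipped. The short Go sketch below shows the shape of that check; it is an illustration, not CRI-O's or the kubelet's actual code, and the .conf/.conflist/.json extension set is an assumption modeled on common CNI config loaders.

package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the KubeletNotReady condition above.
	confDir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(confDir)
	if err != nil {
		log.Fatalf("cannot read %s: %v", confDir, err)
	}
	found := 0
	for _, e := range entries {
		// Assumed extension set; common CNI loaders look for these.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config:", filepath.Join(confDir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; NetworkReady would stay false")
	}
}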
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.678355 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.678418 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.678442 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.678475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.678498 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.783469 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.783530 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.783555 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.783724 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.783758 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.887213 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.887259 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.887269 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.887289 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.887302 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.991118 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.991207 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.991222 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.991242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:51 crc kubenswrapper[4888]: I1201 19:34:51.991284 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:51Z","lastTransitionTime":"2025-12-01T19:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.094245 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.094322 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.094347 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.094375 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.094394 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.197373 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.197448 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.197467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.197495 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.197514 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
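The condition={...} payload that setters.go:603 repeats on each of these heartbeats is plain JSON, which makes the NotReady churn easy to post-process when triaging a log like this one. The Go sketch below, a hypothetical log-analysis helper rather than anything in the kubelet, unmarshals one such condition into a local struct that mirrors only the fields shown above.

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// nodeCondition mirrors just the fields present in the logged condition.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// One condition payload copied verbatim from the log lines above.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s since %s: %s\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
}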
Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.300168 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.300320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.300388 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.300445 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.300486 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.402762 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.402871 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.402898 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.402929 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.402954 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.451410 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.451727 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.451828 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:52 crc kubenswrapper[4888]: E1201 19:34:52.452001 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:52 crc kubenswrapper[4888]: E1201 19:34:52.452303 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:52 crc kubenswrapper[4888]: E1201 19:34:52.452151 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.453447 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:34:52 crc kubenswrapper[4888]: E1201 19:34:52.453735 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.504934 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.505010 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.505029 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.505057 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.505077 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.608879 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.608926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.608947 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.608967 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.608982 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.712569 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.712627 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.712643 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.712662 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.712674 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.815414 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.815454 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.815466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.815481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.815491 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.917712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.917762 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.917774 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.917789 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:52 crc kubenswrapper[4888]: I1201 19:34:52.917800 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:52Z","lastTransitionTime":"2025-12-01T19:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.020972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.021024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.021036 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.021058 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.021071 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.124290 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.124350 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.124362 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.124380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.124395 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.228080 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.228164 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.228259 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.228299 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.228323 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.332561 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.332668 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.332682 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.332699 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.332708 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.436002 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.436061 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.436076 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.436105 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.436124 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.450808 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:53 crc kubenswrapper[4888]: E1201 19:34:53.450991 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.539821 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.539886 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.539926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.539991 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.540018 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.643650 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.643719 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.643738 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.643767 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.643787 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.748117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.748263 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.748337 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.748379 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.748401 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.852298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.852376 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.852401 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.852434 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.852465 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.956242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.956298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.956317 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.956342 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:53 crc kubenswrapper[4888]: I1201 19:34:53.956361 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:53Z","lastTransitionTime":"2025-12-01T19:34:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.059951 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.060019 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.060043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.060075 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.060098 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.162850 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.162950 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.162965 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.162983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.162996 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.266794 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.266923 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.266956 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.267028 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.267054 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.370963 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.371056 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.371081 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.371115 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.371139 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.451069 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.451349 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:54 crc kubenswrapper[4888]: E1201 19:34:54.451550 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.451907 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:54 crc kubenswrapper[4888]: E1201 19:34:54.452074 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:54 crc kubenswrapper[4888]: E1201 19:34:54.452419 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.474229 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.474294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.474311 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.474336 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.474352 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.578364 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.578438 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.578455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.578478 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.578494 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.681079 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.681147 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.681166 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.681218 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.681237 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.786232 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.786310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.786321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.786342 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.786358 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.889117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.889168 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.889207 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.889235 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.889255 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.994704 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.994786 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.994808 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.994840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:54 crc kubenswrapper[4888]: I1201 19:34:54.994865 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:54Z","lastTransitionTime":"2025-12-01T19:34:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.112596 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.112666 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.112678 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.112697 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.112711 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.214963 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.215052 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.215078 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.215116 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.215150 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.318501 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.318553 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.318562 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.318576 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.318584 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.421319 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.421358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.421372 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.421393 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.421406 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.450346 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:34:55 crc kubenswrapper[4888]: E1201 19:34:55.450512 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.523870 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.523938 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.523948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.523962 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.523972 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.628258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.628396 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.628506 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.628541 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.628618 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.731295 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.731351 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.731367 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.731386 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.731399 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.834888 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.834959 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.834974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.835005 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.835029 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.938011 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.938058 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.938068 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.938086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:55 crc kubenswrapper[4888]: I1201 19:34:55.938097 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:55Z","lastTransitionTime":"2025-12-01T19:34:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.041902 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.041974 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.041996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.042023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.042038 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.146405 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.146509 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.146537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.146571 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.146595 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.250321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.250403 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.250475 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.250510 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.250534 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.354471 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.354575 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.354596 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.354629 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.354650 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.450922 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.451093 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.451152 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:34:56 crc kubenswrapper[4888]: E1201 19:34:56.451288 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:34:56 crc kubenswrapper[4888]: E1201 19:34:56.451451 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:34:56 crc kubenswrapper[4888]: E1201 19:34:56.451636 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.457530 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.457601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.457638 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.457674 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.457700 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.561284 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.561396 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.561421 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.561456 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:34:56 crc kubenswrapper[4888]: I1201 19:34:56.561480 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:34:56Z","lastTransitionTime":"2025-12-01T19:34:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... node-status records repeat at 19:34:57.289 and .393 ...]
Dec 01 19:34:57 crc kubenswrapper[4888]: I1201 19:34:57.450743 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:57 crc kubenswrapper[4888]: E1201 19:34:57.450907 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
[... node-status records repeat at 19:34:57.496, .600, .704, .807 and .911 ...]
[... node-status records repeat at 19:34:58.014 and .117 ...]
Dec 01 19:34:58 crc kubenswrapper[4888]: I1201 19:34:58.174660 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:58 crc kubenswrapper[4888]: E1201 19:34:58.174965 4888 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 01 19:34:58 crc kubenswrapper[4888]: E1201 19:34:58.175088 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs podName:4a71b974-d433-46e2-904d-2d955ba74014 nodeName:}" failed. No retries permitted until 2025-12-01 19:36:02.175056773 +0000 UTC m=+162.046086717 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs") pod "network-metrics-daemon-gb7nn" (UID: "4a71b974-d433-46e2-904d-2d955ba74014") : object "openshift-multus"/"metrics-daemon-secret" not registered
[... node-status records repeat at 19:34:58.220 and .324 ...]
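The durationBeforeRetry of 1m4s above is consistent with a doubling retry delay (1s, 2s, 4s, ..., 64s): this mount has been failing since boot, so the volume manager now backs off for over a minute between attempts. A minimal sketch of that backoff pattern; the initial delay, factor and cap are illustrative assumptions, not values read out of the kubelet source:

// backoff.go - sketch of the doubling retry delay suggested by the
// "durationBeforeRetry 1m4s" record above (assumed constants).
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	delay time.Duration // delay to apply on the next failure
	max   time.Duration // upper bound on the delay
}

func (b *backoff) next() time.Duration {
	d := b.delay
	b.delay *= 2
	if b.delay > b.max {
		b.delay = b.max
	}
	return d
}

func main() {
	b := &backoff{delay: time.Second, max: 2 * time.Minute}
	for i := 1; i <= 8; i++ {
		// Prints 1s, 2s, 4s, 8s, 16s, 32s, 1m4s, then 2m0s (capped).
		fmt.Printf("failure %d: no retries permitted for %s\n", i, b.next())
	}
}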
[... node-status records repeat at 19:34:58.427 ...]
Dec 01 19:34:58 crc kubenswrapper[4888]: I1201 19:34:58.450374 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:34:58 crc kubenswrapper[4888]: I1201 19:34:58.450500 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:34:58 crc kubenswrapper[4888]: E1201 19:34:58.450594 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:34:58 crc kubenswrapper[4888]: E1201 19:34:58.450704 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:34:58 crc kubenswrapper[4888]: I1201 19:34:58.450404 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:34:58 crc kubenswrapper[4888]: E1201 19:34:58.450887 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... node-status records repeat at 19:34:58.531, .634, .738, .840, .943 and 19:34:59.046, .151, .254 ...]
[... node-status records repeat at 19:34:59.357 ...]
Dec 01 19:34:59 crc kubenswrapper[4888]: I1201 19:34:59.450283 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:34:59 crc kubenswrapper[4888]: E1201 19:34:59.450580 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
[... node-status records repeat at 19:34:59.459, .562, .665, .768, .870, .972 and 19:35:00.075, .177, .280, .384 ...]
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.450255 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.450252 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.450407 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:00 crc kubenswrapper[4888]: E1201 19:35:00.450501 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:35:00 crc kubenswrapper[4888]: E1201 19:35:00.450740 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:35:00 crc kubenswrapper[4888]: E1201 19:35:00.450915 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.464717 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a76922c921dc29bbcbb1d4d6be3c2a4f9262325e8c9751f9f7db27c604c1a780\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z"
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.476368 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z"
[... node-status records repeat at 19:35:00.488 ...]
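All of the status-patch failures around 19:35:00 share a single root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, so TLS verification rejects every PATCH the status manager sends. A hedged Go sketch of the NotBefore/NotAfter test behind that error; the PEM path is a placeholder, not a path named in the log:

// certcheck.go - report whether a PEM certificate is valid right now,
// mirroring the "certificate has expired or is not yet valid" failures.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // placeholder path
	if err != nil {
		fmt.Println("read:", err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil || block.Type != "CERTIFICATE" {
		fmt.Println("no CERTIFICATE block in file")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n", now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		// Same shape as the log: "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Printf("expired: current time %s is after %s\n", now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Printf("valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}

Rotating the webhook's serving certificate (or correcting a skewed node clock) would clear this class of failure; until then the status manager can only keep retrying, as the repeated records here show.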
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.494001 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z"
Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.511574 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d432d9cf-070c-4c1f-997e-481b0087a5a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fabd9677a0bbf12844ad10d397fa7975890f526c1c8e27e7bb1c90bc05eb5ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c091d0634fed6800d4e90a59b0035e5c9ada0c1289e821e243bbedf587ef0b69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee8f28310ed627bac9a411a936b4accb7b2f59c8bd6d8bc30612aa732d718f8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://128651e18bb02c5e4ad8aa0c15ff70caaa277def57176bc329d8ee87c53e485b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7d300381c7fbe15cc71e2e72d7224fa959480d3c479bdd558c751a596970bc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://781680199d66883f2eba2f1f6a545171f4b27aec6c9aed1f4bfecce79d33b108\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ab38da0da3f8a6bce22301997caf92302d848a13530df0a056a7844decfa2f2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q6k46\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-c5qc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.522871 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4a71b974-d433-46e2-904d-2d955ba74014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hwlh5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gb7nn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.533822 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff78da6a-12d7-4258-82df-da743786c4ef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e47c76ce206a2e9bafeb6931c5b1e7eab2de9f751b23a4fb313d7e80bce2522d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://783f431aa2e798cfad1af57cba2486a0542db54a26c080054d0171150eccc4ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://783f431aa2e798cfad1af57cba2486a0542db54a26c080054d0171150eccc4ea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.547346 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf3eb23c1ccc34e4cebedb367c6f1c18c0181edd7b96ae44950599bac1ace079\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.557882 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"79f63bfb-e11c-4a38-a47f-3162cca30e66\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://92f8c36ca70a60c394300c0f3d6876294f805febd3684c149146910444ad69e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6bd1c375dfa6ea3c3fbde5137ff776a076331543d7a04ac1cad5631cd5d36a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5mmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:52Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-sttz9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 
19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.569580 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5d0369b0-8387-41d3-9576-0f21bacbb39b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f279f031c084f4319f3961088d0d641ea8670560c91145b05b43b06266cbbc8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3a9867147e75530c78be6ea788aed45f7939bd1c84b185430b5fc62fc28e223\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2fa1b353868ae16fb89895ed6aae9e20adfa252dc40b9bc4a02b69968bd0c56\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.582636 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a8b2891-c633-4161-89d6-12f4270339ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1201 19:33:33.058272 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1201 19:33:33.059969 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-694308658/tls.crt::/tmp/serving-cert-694308658/tls.key\\\\\\\"\\\\nI1201 19:33:38.730321 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1201 19:33:38.732474 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1201 19:33:38.732492 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1201 19:33:38.732515 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1201 19:33:38.732520 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1201 19:33:38.738331 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1201 19:33:38.738270 1 secure_serving.go:57] Forcing use of 
http/1.1 only\\\\nW1201 19:33:38.738381 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738387 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1201 19:33:38.738398 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1201 19:33:38.738403 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1201 19:33:38.738407 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1201 19:33:38.738411 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1201 19:33:38.739555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 
19:35:00.590282 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.590329 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.590339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.590359 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.590372 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:00Z","lastTransitionTime":"2025-12-01T19:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.592803 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ce4a5cfb8b26dbd73f70a7ad15eaff9b602ed9c1680d98191186efb440cbf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f696371564e835fe28399fd99b5df6272f24cae24994f67fa93b16315b8f457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12
-01T19:33:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.601131 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kjkx6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e511a2b8-6ef4-4788-9975-1801322e1d9d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4147b9d8704a600fc7fc9f3174883a5a657a46d2d72bc6c57f276c44fd476637\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nlp7b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kjkx6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc 
kubenswrapper[4888]: I1201 19:35:00.610110 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6a551e8a-d979-4cdb-87f5-1075b0b49a36\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6737b484bda21d4e927d25664d63a6e67579ba015ad4ea39e7ab9f914bf667e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d2b6j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jcmzp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.617423 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tqpk6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e63626be-5025-460a-85bd-236bf6ece71a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3331c9de5f9ff15c42c26dba485c35436d132df133bff7930b1fd12248eb57d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ks8wk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:44Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tqpk6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.626115 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9a2057ff-6a82-4380-9988-1cd64c840cdf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e35cd1e16012a0d0df03bd43be30a291f530edff14b0a6763f0852df79c91bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b79245f304d984da676066f298a09139e18b8f73372d39c0ee81f4e4a55467e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ff110fefa9b522d5b0ad112ca0fd5fdde744e2bef78ae50b41da76c9e3d77cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://03405e06456c21ec0e5282265641f043754d22f90ab32c39549b87060466b609\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.642567 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c7dac82-816f-44a8-ba29-1c9aab3b302e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b6229a03c2ac283713c2bbdb42ae3486a742f13a0d8f36221c926b3a1f6839b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bab3cb8d2dec74792ffcf0d50622194a297e1bec87ebbf8d2ad4ff3ad2b460a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://029c98a5fa0e3afc0bba50293a88bb7978a002bff6c87cdb9cf86c0165a95e09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3f8a6d3dfbe9fe4c6304608f75951ba4499463f344d4ce2d76e00ebc544082\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a082e7eb03d0a6e1a89af241ad1bdec5406448a5a5c008014b14b589d6e0b381\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfbb8699d1761315b7b3ce66d70976820da7784e62bfd965f0773d300352bd38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa4f30d65e0301718dbea1ea6c2cf263b3c285a2ba4a1aae3f264ee26e05176f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:21Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fdedf7160d6ee7edc915a53af2239ffbb5f06105e089ae4b3614fdfbe868d857\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:20Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.654053 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:38Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.665273 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hfpdh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:34:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:26Z\\\",\\\"message\\\":\\\"2025-12-01T19:33:41+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020\\\\n2025-12-01T19:33:41+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_b35b12c6-3bde-492e-94bf-064a637ae020 to /host/opt/cni/bin/\\\\n2025-12-01T19:33:41Z [verbose] multus-daemon started\\\\n2025-12-01T19:33:41Z [verbose] Readiness Indicator file check\\\\n2025-12-01T19:34:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:34:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmq8v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:40Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hfpdh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.680242 4888 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T19:34:36Z\\\",\\\"message\\\":\\\"ce\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.183\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 19:34:36.362532 6905 ovnkube.go:599] Stopped ovnkube\\\\nI1201 19:34:36.362557 6905 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1201 19:34:36.362617 6905 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T19:34:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T19:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T19:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T19:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxc5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T19:33:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-f4wj6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:00Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.693154 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.693213 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.693229 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.693248 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.693260 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:00Z","lastTransitionTime":"2025-12-01T19:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.796077 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.796119 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.796128 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.796143 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.796155 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:00Z","lastTransitionTime":"2025-12-01T19:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.898237 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.898298 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.898310 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.898331 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:00 crc kubenswrapper[4888]: I1201 19:35:00.898347 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:00Z","lastTransitionTime":"2025-12-01T19:35:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.002260 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.002331 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.002346 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.002383 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.002407 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.105370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.105419 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.105428 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.105443 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.105453 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.210147 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.210212 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.210227 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.210246 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.210259 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.313317 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.313378 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.313392 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.313415 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.313434 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.401706 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.401751 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.401759 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.401773 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.401785 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.412862 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:01Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.416004 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.416085 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.416104 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.416133 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.416153 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.428865 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:01Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.432454 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.432496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.432505 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.432521 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.432530 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.444489 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:01Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.447904 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.447938 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.447947 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.447962 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.447971 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.451114 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.451234 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.458363 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:01Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.461380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.461419 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.461427 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.461443 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.461454 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.474803 4888 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T19:35:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"66ac84ba-9185-46f7-8b54-e8d4aaa37c1c\\\",\\\"systemUUID\\\":\\\"c809babe-48a2-4ca7-84bd-33ee12868d2c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T19:35:01Z is after 2025-08-24T17:21:41Z" Dec 01 19:35:01 crc kubenswrapper[4888]: E1201 19:35:01.475013 4888 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.477293 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.477325 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.477457 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.477483 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.477497 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.580514 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.580567 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.580579 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.580599 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.580618 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.683413 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.683787 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.683915 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.684026 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.684127 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.787380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.787429 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.787440 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.787455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.787470 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.890176 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.890693 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.890826 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.891022 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.891119 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.994015 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.994533 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.994681 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.994880 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:01 crc kubenswrapper[4888]: I1201 19:35:01.995092 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:01Z","lastTransitionTime":"2025-12-01T19:35:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.098272 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.098320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.098334 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.098358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.098373 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.200907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.200948 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.200957 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.200971 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.200980 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.302776 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.302843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.302859 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.302882 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.302899 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.404893 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.404985 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.405009 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.405041 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.405065 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.450599 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:02 crc kubenswrapper[4888]: E1201 19:35:02.450782 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.450839 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.450927 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:02 crc kubenswrapper[4888]: E1201 19:35:02.451006 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:35:02 crc kubenswrapper[4888]: E1201 19:35:02.451352 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.508302 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.508414 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.508456 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.508498 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.508528 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.611291 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.611363 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.611378 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.611404 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.611429 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.713934 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.714024 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.714038 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.714070 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.714088 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.817455 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.817570 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.817593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.817626 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.817649 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.921593 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.921673 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.921693 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.921730 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:02 crc kubenswrapper[4888]: I1201 19:35:02.921752 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:02Z","lastTransitionTime":"2025-12-01T19:35:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.024520 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.024582 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.024601 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.024623 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.024642 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.127932 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.128034 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.128053 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.128120 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.128141 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.231856 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.231896 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.231906 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.231919 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.231930 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.335691 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.335760 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.335773 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.335797 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.335812 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.438968 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.439048 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.439069 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.439107 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.439128 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.450774 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:35:03 crc kubenswrapper[4888]: E1201 19:35:03.451023 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.451925 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"
Dec 01 19:35:03 crc kubenswrapper[4888]: E1201 19:35:03.452152 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-f4wj6_openshift-ovn-kubernetes(578ef97f-2ce3-405a-9f4e-fcaa5f98df07)\"" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.542410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.542481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.542498 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.542524 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.542544 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.647006 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.647086 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.647112 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.647152 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.647177 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.750414 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.750450 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.750458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.750473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.750482 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.853430 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.853514 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.853532 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.853568 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.853592 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.956983 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.957069 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.957098 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.957137 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:03 crc kubenswrapper[4888]: I1201 19:35:03.957163 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:03Z","lastTransitionTime":"2025-12-01T19:35:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.060139 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.060206 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.060217 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.060238 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.060253 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.164043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.164125 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.164147 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.164177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.164245 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.267370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.267425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.267441 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.267464 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.267480 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.371054 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.371110 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.371123 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.371138 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.371148 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.450311 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.450357 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:04 crc kubenswrapper[4888]: E1201 19:35:04.450445 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.450474 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:04 crc kubenswrapper[4888]: E1201 19:35:04.450580 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:35:04 crc kubenswrapper[4888]: E1201 19:35:04.450624 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.474216 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.474255 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.474265 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.474280 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.474297 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.576792 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.576843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.576857 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.576876 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.576893 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.679273 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.679313 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.679324 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.679339 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.679351 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.781751 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.782045 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.782126 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.782244 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.782329 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.884824 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.884936 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.884950 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.884967 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.884978 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.987625 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.987671 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.987686 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.987707 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:04 crc kubenswrapper[4888]: I1201 19:35:04.987722 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:04Z","lastTransitionTime":"2025-12-01T19:35:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.089819 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.089858 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.089867 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.089881 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.089892 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.192855 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.192894 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.192905 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.192921 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.192930 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.295467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.295509 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.295520 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.295535 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.295545 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.397399 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.397432 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.397443 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.397458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.397467 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.450380 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:35:05 crc kubenswrapper[4888]: E1201 19:35:05.450503 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.499968 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.500033 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.500058 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.500088 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.500113 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.602658 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.602693 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.602712 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.602727 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.602737 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.704835 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.704894 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.704914 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.704935 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.704950 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.807157 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.807226 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.807244 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.807271 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.807286 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.909901 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.909947 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.909955 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.909970 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:05 crc kubenswrapper[4888]: I1201 19:35:05.909983 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:05Z","lastTransitionTime":"2025-12-01T19:35:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.012209 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.012242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.012252 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.012264 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.012272 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.114142 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.114196 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.114207 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.114222 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.114233 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.216681 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.216727 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.216738 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.216753 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.216763 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.318786 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.318818 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.318828 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.318843 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.318854 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.420770 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.420804 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.420813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.420826 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.420835 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.450377 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:06 crc kubenswrapper[4888]: E1201 19:35:06.450493 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.450395 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:06 crc kubenswrapper[4888]: E1201 19:35:06.450575 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.450377 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:06 crc kubenswrapper[4888]: E1201 19:35:06.450651 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.523153 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.523231 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.523243 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.523258 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.523268 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.625320 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.625433 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.625449 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.625473 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.625489 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.728555 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.729352 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.729368 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.729390 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.729425 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.832103 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.832150 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.832175 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.832242 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.832257 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.935470 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.935524 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.935538 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.935558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:06 crc kubenswrapper[4888]: I1201 19:35:06.935572 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:06Z","lastTransitionTime":"2025-12-01T19:35:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.039118 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.039201 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.039217 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.039245 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.039260 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.142294 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.142360 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.142378 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.142410 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.142430 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.245291 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.245380 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.245400 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.245431 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.245450 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.347506 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.347547 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.347558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.347573 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.347582 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.449871 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.449914 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.449925 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.449943 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.449954 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.450200 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:07 crc kubenswrapper[4888]: E1201 19:35:07.450310 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.553468 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.553523 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.553537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.553558 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.553570 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.656333 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.656370 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.656378 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.656394 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.656403 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.761926 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.761990 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.762007 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.762031 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.762058 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.865645 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.865681 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.865691 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.865704 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.865714 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.969435 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.969468 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.969476 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.969489 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:07 crc kubenswrapper[4888]: I1201 19:35:07.969499 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:07Z","lastTransitionTime":"2025-12-01T19:35:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.071754 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.071794 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.071806 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.071822 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.071833 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.174215 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.174252 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.174261 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.174276 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.174287 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.276772 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.276802 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.276810 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.276824 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.276832 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.379417 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.379458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.379466 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.379481 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.379491 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.450244 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.450249 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.450461 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:08 crc kubenswrapper[4888]: E1201 19:35:08.450593 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:08 crc kubenswrapper[4888]: E1201 19:35:08.450836 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:08 crc kubenswrapper[4888]: E1201 19:35:08.450699 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.480976 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.481094 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.481104 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.481117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.481126 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.583996 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.584039 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.584050 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.584064 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.584073 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.686619 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.686668 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.686683 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.686699 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.686710 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.788584 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.788649 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.788661 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.788681 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.788693 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.891117 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.891177 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.891206 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.891232 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.891245 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.994173 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.994526 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.994656 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.994754 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:08 crc kubenswrapper[4888]: I1201 19:35:08.994867 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:08Z","lastTransitionTime":"2025-12-01T19:35:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.097300 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.097383 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.097401 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.097425 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.097441 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.200467 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.200557 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.200582 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.200618 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.200644 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.304023 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.304096 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.304111 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.304144 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.304156 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.406488 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.406579 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.406603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.406634 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.406655 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.450855 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:09 crc kubenswrapper[4888]: E1201 19:35:09.451047 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.510458 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.510509 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.510520 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.510537 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.510550 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.612769 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.612805 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.612813 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.612826 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.612836 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.715212 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.715253 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.715265 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.715281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.715292 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.818793 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.818887 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.818906 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.818931 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.818951 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.921321 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.921358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.921367 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.921383 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:09 crc kubenswrapper[4888]: I1201 19:35:09.921392 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:09Z","lastTransitionTime":"2025-12-01T19:35:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.023844 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.023882 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.023892 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.023908 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.023917 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.126445 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.126492 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.126502 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.126519 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.126532 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.233636 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.233714 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.233737 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.233768 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.233790 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.336840 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.336919 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.336935 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.336989 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.337008 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.439287 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.439329 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.439341 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.439360 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.439375 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.450731 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.450757 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.450827 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:10 crc kubenswrapper[4888]: E1201 19:35:10.450958 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:10 crc kubenswrapper[4888]: E1201 19:35:10.451123 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:10 crc kubenswrapper[4888]: E1201 19:35:10.451357 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.485570 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=19.485534834 podStartE2EDuration="19.485534834s" podCreationTimestamp="2025-12-01 19:34:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.468621372 +0000 UTC m=+110.339651296" watchObservedRunningTime="2025-12-01 19:35:10.485534834 +0000 UTC m=+110.356564748" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.500705 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=90.500689256 podStartE2EDuration="1m30.500689256s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.500208202 +0000 UTC m=+110.371238116" watchObservedRunningTime="2025-12-01 19:35:10.500689256 +0000 UTC m=+110.371719170" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.521334 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=91.521290943 podStartE2EDuration="1m31.521290943s" podCreationTimestamp="2025-12-01 19:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.518095822 +0000 UTC m=+110.389125756" watchObservedRunningTime="2025-12-01 19:35:10.521290943 +0000 UTC m=+110.392320857" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.543959 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.544020 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.544029 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.544043 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc 
kubenswrapper[4888]: I1201 19:35:10.544052 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.560699 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podStartSLOduration=90.560664376 podStartE2EDuration="1m30.560664376s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.560162881 +0000 UTC m=+110.431192785" watchObservedRunningTime="2025-12-01 19:35:10.560664376 +0000 UTC m=+110.431694290" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.560950 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-kjkx6" podStartSLOduration=90.560942854 podStartE2EDuration="1m30.560942854s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.547460479 +0000 UTC m=+110.418490393" watchObservedRunningTime="2025-12-01 19:35:10.560942854 +0000 UTC m=+110.431972768" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.591778 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-sttz9" podStartSLOduration=90.591760962 podStartE2EDuration="1m30.591760962s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.573062919 +0000 UTC m=+110.444092843" watchObservedRunningTime="2025-12-01 19:35:10.591760962 +0000 UTC m=+110.462790876" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.591896 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=60.591891246 podStartE2EDuration="1m0.591891246s" podCreationTimestamp="2025-12-01 19:34:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.591525285 +0000 UTC m=+110.462555199" watchObservedRunningTime="2025-12-01 19:35:10.591891246 +0000 UTC m=+110.462921160" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.615509 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=32.615491138 podStartE2EDuration="32.615491138s" podCreationTimestamp="2025-12-01 19:34:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.614527711 +0000 UTC m=+110.485557625" watchObservedRunningTime="2025-12-01 19:35:10.615491138 +0000 UTC m=+110.486521052" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.646493 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.646528 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.646541 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.646556 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.646567 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.664216 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hfpdh" podStartSLOduration=90.664198777 podStartE2EDuration="1m30.664198777s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.638999889 +0000 UTC m=+110.510029803" watchObservedRunningTime="2025-12-01 19:35:10.664198777 +0000 UTC m=+110.535228691" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.674816 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-tqpk6" podStartSLOduration=90.674793169 podStartE2EDuration="1m30.674793169s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.674377567 +0000 UTC m=+110.545407491" watchObservedRunningTime="2025-12-01 19:35:10.674793169 +0000 UTC m=+110.545823103" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.743524 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-c5qc6" podStartSLOduration=90.743510748 podStartE2EDuration="1m30.743510748s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:10.743162518 +0000 UTC m=+110.614192432" watchObservedRunningTime="2025-12-01 19:35:10.743510748 +0000 UTC m=+110.614540662" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.748603 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.748818 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.748890 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.748954 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.749015 4888 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.851079 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.851174 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.851228 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.851255 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.851273 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.954135 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.954374 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.954391 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.954412 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:10 crc kubenswrapper[4888]: I1201 19:35:10.954424 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:10Z","lastTransitionTime":"2025-12-01T19:35:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.056972 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.057018 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.057028 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.057046 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.057057 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.159487 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.159720 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.159737 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.159758 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.159771 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.262760 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.262815 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.262831 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.262852 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.262868 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.364907 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.364962 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.364978 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.365000 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.365014 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.450965 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:11 crc kubenswrapper[4888]: E1201 19:35:11.451088 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.467449 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.467496 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.467508 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.467526 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.467539 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.572741 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.572790 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.572802 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.572819 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.572836 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.675263 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.675327 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.675338 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.675358 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.675373 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.777714 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.777755 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.777763 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.777778 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.777790 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.834457 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.834530 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.834540 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.834557 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.834574 4888 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T19:35:11Z","lastTransitionTime":"2025-12-01T19:35:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.883670 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt"] Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.884280 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.886007 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.887854 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.887994 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.888115 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.952623 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.952705 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.952732 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-ssl-certs\") pod 
\"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.952775 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:11 crc kubenswrapper[4888]: I1201 19:35:11.952803 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053703 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053759 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053776 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053812 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053828 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053881 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" 
(UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.053947 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.054636 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.059384 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.080350 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ce3eec9e-5fb8-4f1a-a014-f497dadc1d88-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rtzqt\" (UID: \"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.198905 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" Dec 01 19:35:12 crc kubenswrapper[4888]: W1201 19:35:12.211250 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce3eec9e_5fb8_4f1a_a014_f497dadc1d88.slice/crio-960757b734a0753bcff0f9eb27655aa41d555860de950af89eb52a8dca38650b WatchSource:0}: Error finding container 960757b734a0753bcff0f9eb27655aa41d555860de950af89eb52a8dca38650b: Status 404 returned error can't find the container with id 960757b734a0753bcff0f9eb27655aa41d555860de950af89eb52a8dca38650b Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.450544 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.450615 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.450627 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:12 crc kubenswrapper[4888]: E1201 19:35:12.450703 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:12 crc kubenswrapper[4888]: E1201 19:35:12.450895 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:12 crc kubenswrapper[4888]: E1201 19:35:12.450954 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.985136 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" event={"ID":"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88","Type":"ContainerStarted","Data":"0c5a5334ca24cc9cf4fa155ca174bf008d3d0f4618066672c7296a168815f7f7"} Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.985262 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" event={"ID":"ce3eec9e-5fb8-4f1a-a014-f497dadc1d88","Type":"ContainerStarted","Data":"960757b734a0753bcff0f9eb27655aa41d555860de950af89eb52a8dca38650b"} Dec 01 19:35:12 crc kubenswrapper[4888]: I1201 19:35:12.998043 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rtzqt" podStartSLOduration=92.998019595 podStartE2EDuration="1m32.998019595s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:12.997209971 +0000 UTC m=+112.868239885" watchObservedRunningTime="2025-12-01 19:35:12.998019595 +0000 UTC m=+112.869049509" Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.451587 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:13 crc kubenswrapper[4888]: E1201 19:35:13.451779 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.989872 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/1.log" Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.990458 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/0.log" Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.990508 4888 generic.go:334] "Generic (PLEG): container finished" podID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" containerID="a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15" exitCode=1 Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.990554 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerDied","Data":"a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15"} Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.990625 4888 scope.go:117] "RemoveContainer" containerID="4244b41e4d9ec826aac238b6c280f5a2c5362abe1a07abe9f101fcaf78a6357a" Dec 01 19:35:13 crc kubenswrapper[4888]: I1201 19:35:13.991069 4888 scope.go:117] "RemoveContainer" containerID="a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15" Dec 01 19:35:13 crc kubenswrapper[4888]: E1201 19:35:13.991404 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-hfpdh_openshift-multus(08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6)\"" pod="openshift-multus/multus-hfpdh" podUID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" Dec 01 19:35:14 crc kubenswrapper[4888]: I1201 19:35:14.451133 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:14 crc kubenswrapper[4888]: I1201 19:35:14.451157 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:14 crc kubenswrapper[4888]: I1201 19:35:14.451223 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:14 crc kubenswrapper[4888]: E1201 19:35:14.451975 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:14 crc kubenswrapper[4888]: E1201 19:35:14.452102 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:14 crc kubenswrapper[4888]: E1201 19:35:14.452290 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:14 crc kubenswrapper[4888]: I1201 19:35:14.994586 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/1.log" Dec 01 19:35:15 crc kubenswrapper[4888]: I1201 19:35:15.451056 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:15 crc kubenswrapper[4888]: E1201 19:35:15.451265 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:16 crc kubenswrapper[4888]: I1201 19:35:16.451293 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:16 crc kubenswrapper[4888]: I1201 19:35:16.451315 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:16 crc kubenswrapper[4888]: E1201 19:35:16.451414 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:16 crc kubenswrapper[4888]: I1201 19:35:16.451451 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:16 crc kubenswrapper[4888]: E1201 19:35:16.451582 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:16 crc kubenswrapper[4888]: E1201 19:35:16.451868 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:17 crc kubenswrapper[4888]: I1201 19:35:17.451106 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:17 crc kubenswrapper[4888]: E1201 19:35:17.451250 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:18 crc kubenswrapper[4888]: I1201 19:35:18.451162 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:18 crc kubenswrapper[4888]: I1201 19:35:18.451265 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:18 crc kubenswrapper[4888]: E1201 19:35:18.451578 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:18 crc kubenswrapper[4888]: I1201 19:35:18.451396 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:18 crc kubenswrapper[4888]: E1201 19:35:18.452468 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:18 crc kubenswrapper[4888]: E1201 19:35:18.452646 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:18 crc kubenswrapper[4888]: I1201 19:35:18.453040 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.007514 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/3.log" Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.009694 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerStarted","Data":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.010084 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.042207 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podStartSLOduration=99.04216992 podStartE2EDuration="1m39.04216992s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:19.041002487 +0000 UTC m=+118.912032391" watchObservedRunningTime="2025-12-01 19:35:19.04216992 +0000 UTC m=+118.913199834" Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.195649 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gb7nn"] Dec 01 19:35:19 crc kubenswrapper[4888]: I1201 19:35:19.195830 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:19 crc kubenswrapper[4888]: E1201 19:35:19.195938 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:20 crc kubenswrapper[4888]: E1201 19:35:20.395213 4888 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 01 19:35:20 crc kubenswrapper[4888]: I1201 19:35:20.450441 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:20 crc kubenswrapper[4888]: I1201 19:35:20.450513 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:20 crc kubenswrapper[4888]: I1201 19:35:20.451897 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:20 crc kubenswrapper[4888]: E1201 19:35:20.451897 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:20 crc kubenswrapper[4888]: E1201 19:35:20.452006 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:20 crc kubenswrapper[4888]: E1201 19:35:20.452108 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:20 crc kubenswrapper[4888]: E1201 19:35:20.536652 4888 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 19:35:21 crc kubenswrapper[4888]: I1201 19:35:21.450675 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:21 crc kubenswrapper[4888]: E1201 19:35:21.450848 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:22 crc kubenswrapper[4888]: I1201 19:35:22.450532 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:22 crc kubenswrapper[4888]: I1201 19:35:22.450559 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:22 crc kubenswrapper[4888]: E1201 19:35:22.450653 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:22 crc kubenswrapper[4888]: E1201 19:35:22.450718 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:22 crc kubenswrapper[4888]: I1201 19:35:22.450730 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:22 crc kubenswrapper[4888]: E1201 19:35:22.450863 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:23 crc kubenswrapper[4888]: I1201 19:35:23.450794 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:23 crc kubenswrapper[4888]: E1201 19:35:23.451005 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:24 crc kubenswrapper[4888]: I1201 19:35:24.451017 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:24 crc kubenswrapper[4888]: I1201 19:35:24.451059 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:24 crc kubenswrapper[4888]: I1201 19:35:24.451093 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:24 crc kubenswrapper[4888]: E1201 19:35:24.451179 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:24 crc kubenswrapper[4888]: E1201 19:35:24.451315 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:24 crc kubenswrapper[4888]: E1201 19:35:24.451377 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:25 crc kubenswrapper[4888]: I1201 19:35:25.451131 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:25 crc kubenswrapper[4888]: E1201 19:35:25.451331 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:25 crc kubenswrapper[4888]: E1201 19:35:25.538517 4888 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 19:35:26 crc kubenswrapper[4888]: I1201 19:35:26.450810 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:26 crc kubenswrapper[4888]: I1201 19:35:26.450922 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:26 crc kubenswrapper[4888]: E1201 19:35:26.450953 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:26 crc kubenswrapper[4888]: I1201 19:35:26.451012 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:26 crc kubenswrapper[4888]: E1201 19:35:26.451126 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:26 crc kubenswrapper[4888]: E1201 19:35:26.451384 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:26 crc kubenswrapper[4888]: I1201 19:35:26.451558 4888 scope.go:117] "RemoveContainer" containerID="a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15" Dec 01 19:35:27 crc kubenswrapper[4888]: I1201 19:35:27.036394 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/1.log" Dec 01 19:35:27 crc kubenswrapper[4888]: I1201 19:35:27.036690 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerStarted","Data":"05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730"} Dec 01 19:35:27 crc kubenswrapper[4888]: I1201 19:35:27.450847 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:27 crc kubenswrapper[4888]: E1201 19:35:27.451397 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:28 crc kubenswrapper[4888]: I1201 19:35:28.450783 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:28 crc kubenswrapper[4888]: I1201 19:35:28.450783 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:28 crc kubenswrapper[4888]: I1201 19:35:28.450799 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:28 crc kubenswrapper[4888]: E1201 19:35:28.451175 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:28 crc kubenswrapper[4888]: E1201 19:35:28.451483 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:28 crc kubenswrapper[4888]: E1201 19:35:28.451374 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:29 crc kubenswrapper[4888]: I1201 19:35:29.450367 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:29 crc kubenswrapper[4888]: E1201 19:35:29.450534 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gb7nn" podUID="4a71b974-d433-46e2-904d-2d955ba74014" Dec 01 19:35:30 crc kubenswrapper[4888]: I1201 19:35:30.450552 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:30 crc kubenswrapper[4888]: I1201 19:35:30.450662 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:30 crc kubenswrapper[4888]: E1201 19:35:30.451974 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 19:35:30 crc kubenswrapper[4888]: I1201 19:35:30.452031 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:30 crc kubenswrapper[4888]: E1201 19:35:30.452164 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 19:35:30 crc kubenswrapper[4888]: E1201 19:35:30.452290 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 19:35:31 crc kubenswrapper[4888]: I1201 19:35:31.450455 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn" Dec 01 19:35:31 crc kubenswrapper[4888]: I1201 19:35:31.453363 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 19:35:31 crc kubenswrapper[4888]: I1201 19:35:31.454912 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.221281 4888 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.266502 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2w272"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.267023 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.270939 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.271035 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.271274 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.271375 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.271531 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.271776 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.281644 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.282231 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.284144 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.284946 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.289307 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.291622 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.293456 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.293997 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.306096 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.306733 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.306562 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.307361 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.307958 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308105 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308263 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308366 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r9km8"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308707 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308844 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308892 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309149 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309314 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309480 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309526 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309626 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309734 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309855 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.309751 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308348 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.308313 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.310625 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.310012 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.310988 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.314600 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.315599 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.315432 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s8fd6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.315393 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.316559 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.317541 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.323476 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.324051 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4k97"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.324760 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.325250 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.325787 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.325988 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.326447 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.326504 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.325800 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.326859 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.327260 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.332069 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.332624 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.333030 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.333042 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.333819 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.333935 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.334078 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.334700 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335242 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335475 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335590 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335706 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335836 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.335941 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336118 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336274 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336309 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336399 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336446 4888 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336504 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336614 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336695 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336777 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337026 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337040 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337157 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337249 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337294 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337251 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337409 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337511 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.336509 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.337673 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.338770 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.338816 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.339264 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.339344 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.340781 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.341244 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.341872 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.342038 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.343461 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.344120 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.345415 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.345999 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346379 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346411 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346473 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346545 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346609 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346652 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346800 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346869 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346977 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.347026 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.350882 4888 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.350915 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346809 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.346391 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.358388 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7gg6f"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.360430 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.361441 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.361480 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.367673 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369395 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz8k6\" (UniqueName: \"kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369436 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4thtq\" (UniqueName: \"kubernetes.io/projected/f325c412-68ff-4735-a4fb-c5d2183d0401-kube-api-access-4thtq\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369459 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-audit\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369478 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369493 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369508 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvscn\" (UniqueName: \"kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369525 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-encryption-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369542 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-client\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369568 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369584 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-trusted-ca\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369602 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-serving-cert\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369615 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369632 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config\") pod \"console-f9d7485db-bt5fw\" (UID: 
\"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369648 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369667 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-serving-cert\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369685 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-node-pullsecrets\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369710 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-config\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369730 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369749 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d43d2671-5cdc-4203-9956-1de5f480a3cc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369769 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-image-import-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369783 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369800 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6j2x\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-kube-api-access-f6j2x\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369815 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369831 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369846 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369863 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369883 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369909 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369931 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d43d2671-5cdc-4203-9956-1de5f480a3cc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc 
kubenswrapper[4888]: I1201 19:35:32.369961 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.369980 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370011 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trjqk\" (UniqueName: \"kubernetes.io/projected/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-kube-api-access-trjqk\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370035 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-audit-dir\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370058 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370083 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x25vz\" (UniqueName: \"kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370310 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370650 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.370861 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.371600 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.371684 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.376360 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.376927 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.378474 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.378581 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.384540 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.384981 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.385255 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.385349 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.385525 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.385706 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.386009 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.386585 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.387527 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.387805 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.394271 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.397061 4888 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.397727 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g4ddt"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.398311 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-gd6ps"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.398577 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.398674 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.398594 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.399789 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.399992 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.400521 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.401416 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.401647 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.402169 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.403255 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.409889 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.417977 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.419928 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.420317 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-wzxcq"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.421250 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.421829 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.422642 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.423734 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.424646 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.435675 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mdtrk"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.437020 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.439260 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.441756 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.442663 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2w272"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.445688 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.448690 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.448778 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.449418 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.450388 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.450535 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.451741 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.452022 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.467236 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.467859 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.468279 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vdkm7"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.468795 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t425v"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.468941 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.469060 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.469132 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.469637 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.470102 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-7ggph"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.470592 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.470613 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hpngg"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.470925 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471241 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471257 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471346 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471462 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471520 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471655 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.471842 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472172 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472217 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472239 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5f780c25-3451-47ee-9c35-60d649683350-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472265 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472284 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trjqk\" (UniqueName: \"kubernetes.io/projected/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-kube-api-access-trjqk\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472299 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-config\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472319 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/0105ed18-666e-4e81-aaf5-8e63ba162602-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472338 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472354 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-audit-dir\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472369 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x25vz\" (UniqueName: \"kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472384 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472399 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmgwb\" (UniqueName: \"kubernetes.io/projected/5caa088d-82b7-45f9-b540-57fa54882521-kube-api-access-wmgwb\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472414 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b57ff58-42f5-48ae-8637-aa879cf54dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.472429 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz8k6\" (UniqueName: \"kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.473378 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.473440 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-audit-dir\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.473879 4888 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474360 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474409 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhh85\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-kube-api-access-dhh85\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474435 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7671338d-c43b-437d-853a-3759b0a10d95-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474457 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmldm\" (UniqueName: \"kubernetes.io/projected/7671338d-c43b-437d-853a-3759b0a10d95-kube-api-access-xmldm\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474487 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xmq8\" (UniqueName: \"kubernetes.io/projected/0105ed18-666e-4e81-aaf5-8e63ba162602-kube-api-access-5xmq8\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474515 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a816c87b-de2f-4a92-a981-3808e168b282-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474555 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4thtq\" (UniqueName: \"kubernetes.io/projected/f325c412-68ff-4735-a4fb-c5d2183d0401-kube-api-access-4thtq\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474585 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-encryption-config\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474607 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdbx9\" (UniqueName: \"kubernetes.io/projected/50b1e183-9a9a-4daa-a769-78bc53d20c41-kube-api-access-bdbx9\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474628 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-audit\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474647 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474663 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474679 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86a0fb84-d4de-443a-86fb-273acb4138d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474696 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474711 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvscn\" (UniqueName: \"kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474728 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: 
\"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474748 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-encryption-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474763 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0022a5ef-fe10-4344-824c-75462ac971c5-machine-approver-tls\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474778 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a816c87b-de2f-4a92-a981-3808e168b282-config\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474795 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rgt8\" (UniqueName: \"kubernetes.io/projected/86a0fb84-d4de-443a-86fb-273acb4138d0-kube-api-access-6rgt8\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474811 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474826 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a816c87b-de2f-4a92-a981-3808e168b282-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474856 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474873 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-trusted-ca\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " 
pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474886 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-client\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474903 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr2mp\" (UniqueName: \"kubernetes.io/projected/60a42fba-5e64-4a68-a9a3-e29ff836d97f-kube-api-access-qr2mp\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474926 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/60a42fba-5e64-4a68-a9a3-e29ff836d97f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474943 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5caa088d-82b7-45f9-b540-57fa54882521-metrics-tls\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474960 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-serving-cert\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474977 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-images\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.474995 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhrtx\" (UniqueName: \"kubernetes.io/projected/0022a5ef-fe10-4344-824c-75462ac971c5-kube-api-access-lhrtx\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475013 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475029 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475045 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475062 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-serving-cert\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475077 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-node-pullsecrets\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475093 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gncfc\" (UniqueName: \"kubernetes.io/projected/5f780c25-3451-47ee-9c35-60d649683350-kube-api-access-gncfc\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475109 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475126 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b57ff58-42f5-48ae-8637-aa879cf54dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475149 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-config\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475165 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-dir\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475225 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475247 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d43d2671-5cdc-4203-9956-1de5f480a3cc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475263 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-image-import-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475279 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-serving-cert\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475293 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvdmp\" (UniqueName: \"kubernetes.io/projected/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-kube-api-access-qvdmp\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475347 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475367 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6j2x\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-kube-api-access-f6j2x\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475391 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475413 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475432 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475449 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/86a0fb84-d4de-443a-86fb-273acb4138d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475466 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5f780c25-3451-47ee-9c35-60d649683350-proxy-tls\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475488 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475508 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475526 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-client\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475546 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475568 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d43d2671-5cdc-4203-9956-1de5f480a3cc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475583 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475600 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475600 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475616 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/50b1e183-9a9a-4daa-a769-78bc53d20c41-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475648 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475663 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-auth-proxy-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475681 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475698 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-t4ft4\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475713 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-policies\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.475727 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7671338d-c43b-437d-853a-3759b0a10d95-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.476400 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-audit\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.476435 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.477159 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-config\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.477278 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.478161 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.478214 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.478639 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.478645 4888 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:32.978620718 +0000 UTC m=+132.849650632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.478947 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.479898 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d43d2671-5cdc-4203-9956-1de5f480a3cc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.480044 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.480664 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-trusted-ca-bundle\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.480786 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.482070 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-client\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.482082 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 
19:35:32.482462 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f325c412-68ff-4735-a4fb-c5d2183d0401-node-pullsecrets\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.482705 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.483016 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-trusted-ca\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.483877 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-image-import-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.483968 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.484260 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-serving-cert\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.484470 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d43d2671-5cdc-4203-9956-1de5f480a3cc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.484547 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f325c412-68ff-4735-a4fb-c5d2183d0401-etcd-serving-ca\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.485137 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-encryption-config\") pod \"apiserver-76f77b778f-l4k97\" (UID: 
\"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.485445 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.485481 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f325c412-68ff-4735-a4fb-c5d2183d0401-serving-cert\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.485741 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.486786 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.488988 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.489323 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.490387 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.491654 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4k97"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.492909 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gd6ps"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.495623 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.498329 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r9km8"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.500371 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vdkm7"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.503681 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g4ddt"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.505009 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-b9tj2"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 
19:35:32.505860 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.506405 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9zw95"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.508699 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.508845 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.511456 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.517446 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mdtrk"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.520533 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.521686 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hpngg"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.522723 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.524081 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t425v"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.525147 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7gg6f"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.526883 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.528601 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.529362 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.529971 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-7ggph"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.531559 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.533014 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.534166 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.535554 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.536772 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s8fd6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.537838 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.539242 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9zw95"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.539893 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.540964 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.542362 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.544015 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz"] Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.549515 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.569295 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576552 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.576712 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.076682104 +0000 UTC m=+132.947712018 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576763 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5caa088d-82b7-45f9-b540-57fa54882521-metrics-tls\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576806 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-266nn\" (UniqueName: \"kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576832 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576857 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jh7x\" (UniqueName: \"kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576944 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.576976 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577027 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44qtp\" (UniqueName: \"kubernetes.io/projected/790b6a30-feee-4d83-9dca-ccc8116b0b8d-kube-api-access-44qtp\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577080 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577142 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-dir\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577224 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-serving-cert\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577247 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvdmp\" (UniqueName: \"kubernetes.io/projected/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-kube-api-access-qvdmp\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577269 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-dir\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577273 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2d3b1bf-96e4-4a85-9ddb-730b02687767-service-ca-bundle\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577409 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a040efc-7545-47b3-b66e-654b88099f0a-proxy-tls\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577436 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577469 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577490 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577524 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/86a0fb84-d4de-443a-86fb-273acb4138d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577541 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-service-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577562 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577583 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsx8h\" (UniqueName: \"kubernetes.io/projected/420bd4d7-068c-4b38-b065-0d93466de36f-kube-api-access-tsx8h\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.577824 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.077810346 +0000 UTC m=+132.948840350 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.577974 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/86a0fb84-d4de-443a-86fb-273acb4138d0-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578097 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578162 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578200 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jqb4\" (UniqueName: \"kubernetes.io/projected/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-kube-api-access-5jqb4\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578323 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q9x9\" (UniqueName: \"kubernetes.io/projected/18fe9152-c196-4b59-bb0c-b01057148a0f-kube-api-access-2q9x9\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578347 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578435 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kthvg\" (UniqueName: \"kubernetes.io/projected/0a040efc-7545-47b3-b66e-654b88099f0a-kube-api-access-kthvg\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: 
\"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578481 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578514 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4ft4\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578542 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-policies\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578570 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-registration-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578595 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578621 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578648 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578694 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-config\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578723 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8rcc\" (UniqueName: \"kubernetes.io/projected/6962c849-519b-40a6-a785-9428962b8e3c-kube-api-access-q8rcc\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578762 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578788 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-plugins-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578810 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-csi-data-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578837 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmgwb\" (UniqueName: \"kubernetes.io/projected/5caa088d-82b7-45f9-b540-57fa54882521-kube-api-access-wmgwb\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578852 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.578863 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b57ff58-42f5-48ae-8637-aa879cf54dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579145 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579357 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579390 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhh85\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-kube-api-access-dhh85\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579394 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-config\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579414 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmldm\" (UniqueName: \"kubernetes.io/projected/7671338d-c43b-437d-853a-3759b0a10d95-kube-api-access-xmldm\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579479 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xmq8\" (UniqueName: \"kubernetes.io/projected/0105ed18-666e-4e81-aaf5-8e63ba162602-kube-api-access-5xmq8\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579513 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdbx9\" (UniqueName: \"kubernetes.io/projected/50b1e183-9a9a-4daa-a769-78bc53d20c41-kube-api-access-bdbx9\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579557 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfh9k\" (UniqueName: \"kubernetes.io/projected/50de192e-a3d8-4fc9-94c6-ed727a3bffc1-kube-api-access-gfh9k\") pod \"downloads-7954f5f757-gd6ps\" (UID: \"50de192e-a3d8-4fc9-94c6-ed727a3bffc1\") " pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579583 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579611 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-service-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579707 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579724 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5caa088d-82b7-45f9-b540-57fa54882521-metrics-tls\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579739 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrcgr\" (UniqueName: \"kubernetes.io/projected/7773020b-602c-4890-a3f5-944b5ba4a9a2-kube-api-access-wrcgr\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579828 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/35cbe699-623b-44fa-8402-ea41fd3b9a8e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579858 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2hxv\" (UniqueName: \"kubernetes.io/projected/4f9ca160-603d-4864-a4e2-e52192731771-kube-api-access-j2hxv\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579877 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.579896 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580162 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted\") pod 
\"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580643 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-metrics-certs\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580679 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xk95\" (UniqueName: \"kubernetes.io/projected/35cbe699-623b-44fa-8402-ea41fd3b9a8e-kube-api-access-5xk95\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580718 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a816c87b-de2f-4a92-a981-3808e168b282-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580736 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-srv-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580751 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580770 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580786 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-mountpoint-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580803 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: 
\"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580801 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580836 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr2mp\" (UniqueName: \"kubernetes.io/projected/60a42fba-5e64-4a68-a9a3-e29ff836d97f-kube-api-access-qr2mp\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580839 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b57ff58-42f5-48ae-8637-aa879cf54dc5-trusted-ca\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580880 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580915 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mssl\" (UniqueName: \"kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580963 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr6pw\" (UniqueName: \"kubernetes.io/projected/b2d3b1bf-96e4-4a85-9ddb-730b02687767-kube-api-access-wr6pw\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.580991 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-serving-cert\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581017 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8x8s\" (UniqueName: \"kubernetes.io/projected/94df7fc9-9387-4d94-bc68-ba178504980e-kube-api-access-s8x8s\") pod \"migrator-59844c95c7-6mfmd\" (UID: \"94df7fc9-9387-4d94-bc68-ba178504980e\") " 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581050 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-etcd-client\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581073 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t7wz\" (UniqueName: \"kubernetes.io/projected/eed00975-3972-4bc7-aac6-11986d1d5a74-kube-api-access-6t7wz\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581096 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdwxc\" (UniqueName: \"kubernetes.io/projected/0e2c960f-af3d-4547-b034-c77d3598b887-kube-api-access-sdwxc\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581162 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581225 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581252 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-socket-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581311 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-images\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581392 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhrtx\" (UniqueName: \"kubernetes.io/projected/0022a5ef-fe10-4344-824c-75462ac971c5-kube-api-access-lhrtx\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc 
kubenswrapper[4888]: I1201 19:35:32.581418 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-srv-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581479 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-serving-cert\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581503 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/420bd4d7-068c-4b38-b065-0d93466de36f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581558 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581587 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gncfc\" (UniqueName: \"kubernetes.io/projected/5f780c25-3451-47ee-9c35-60d649683350-kube-api-access-gncfc\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581622 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-serving-cert\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581641 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b57ff58-42f5-48ae-8637-aa879cf54dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581711 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581762 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5f780c25-3451-47ee-9c35-60d649683350-proxy-tls\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581784 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581816 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-client\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581838 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581875 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt6lk\" (UniqueName: \"kubernetes.io/projected/500c01ff-5b97-4c18-9080-f714a39ba531-kube-api-access-jt6lk\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.581902 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/50b1e183-9a9a-4daa-a769-78bc53d20c41-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582272 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-config\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582307 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/420bd4d7-068c-4b38-b065-0d93466de36f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582345 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-auth-proxy-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582371 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0e2c960f-af3d-4547-b034-c77d3598b887-tmpfs\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582395 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582571 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7671338d-c43b-437d-853a-3759b0a10d95-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582607 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582654 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582679 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5f780c25-3451-47ee-9c35-60d649683350-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582694 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/60a42fba-5e64-4a68-a9a3-e29ff836d97f-images\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582703 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582730 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582811 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582848 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582869 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-config\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582890 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582933 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-profile-collector-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582952 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.582973 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/0105ed18-666e-4e81-aaf5-8e63ba162602-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583004 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdhrh\" (UniqueName: \"kubernetes.io/projected/66a22559-1ea1-4cf3-86d5-2672504a1968-kube-api-access-pdhrh\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583198 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-images\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583218 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583253 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7671338d-c43b-437d-853a-3759b0a10d95-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583270 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a816c87b-de2f-4a92-a981-3808e168b282-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583289 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-encryption-config\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583309 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-default-certificate\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583325 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-stats-auth\") pod \"router-default-5444994796-wzxcq\" (UID: 
\"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583394 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0022a5ef-fe10-4344-824c-75462ac971c5-auth-proxy-config\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583403 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tmhj\" (UniqueName: \"kubernetes.io/projected/929ef8f1-4b63-4c81-b892-6687da60b7f7-kube-api-access-2tmhj\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583423 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583442 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583483 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86a0fb84-d4de-443a-86fb-273acb4138d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583500 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-config\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583528 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583545 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0022a5ef-fe10-4344-824c-75462ac971c5-machine-approver-tls\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583561 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a816c87b-de2f-4a92-a981-3808e168b282-config\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.583634 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5f780c25-3451-47ee-9c35-60d649683350-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584120 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a816c87b-de2f-4a92-a981-3808e168b282-config\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584150 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l4b2\" (UniqueName: \"kubernetes.io/projected/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-kube-api-access-9l4b2\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584179 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rgt8\" (UniqueName: \"kubernetes.io/projected/86a0fb84-d4de-443a-86fb-273acb4138d0-kube-api-access-6rgt8\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584220 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584237 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584278 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 
19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584295 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82cd8\" (UniqueName: \"kubernetes.io/projected/a8e31e84-31ed-4549-81b7-7409c1f34c08-kube-api-access-82cd8\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584309 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584326 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584443 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584455 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.584468 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/60a42fba-5e64-4a68-a9a3-e29ff836d97f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.585498 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7671338d-c43b-437d-853a-3759b0a10d95-config\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.585602 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-audit-policies\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.585729 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.586234 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.586552 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.586772 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7671338d-c43b-437d-853a-3759b0a10d95-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.586961 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5f780c25-3451-47ee-9c35-60d649683350-proxy-tls\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.587138 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/60a42fba-5e64-4a68-a9a3-e29ff836d97f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.587494 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.589693 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3b57ff58-42f5-48ae-8637-aa879cf54dc5-metrics-tls\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.589734 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.589790 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/50b1e183-9a9a-4daa-a769-78bc53d20c41-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.589739 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a816c87b-de2f-4a92-a981-3808e168b282-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.590334 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-etcd-client\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.590527 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86a0fb84-d4de-443a-86fb-273acb4138d0-serving-cert\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.590809 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/0105ed18-666e-4e81-aaf5-8e63ba162602-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.591092 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0022a5ef-fe10-4344-824c-75462ac971c5-machine-approver-tls\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.592422 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-encryption-config\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.610306 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.629461 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.649058 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.668990 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 19:35:32 crc 
kubenswrapper[4888]: I1201 19:35:32.685307 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685576 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-default-certificate\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685653 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-stats-auth\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685740 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tmhj\" (UniqueName: \"kubernetes.io/projected/929ef8f1-4b63-4c81-b892-6687da60b7f7-kube-api-access-2tmhj\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685810 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685876 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.685952 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-config\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686022 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l4b2\" (UniqueName: \"kubernetes.io/projected/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-kube-api-access-9l4b2\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686102 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686168 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686422 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686538 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686627 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82cd8\" (UniqueName: \"kubernetes.io/projected/a8e31e84-31ed-4549-81b7-7409c1f34c08-kube-api-access-82cd8\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686722 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686796 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686884 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-266nn\" (UniqueName: \"kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.686959 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687038 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jh7x\" (UniqueName: \"kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687109 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687198 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687307 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44qtp\" (UniqueName: \"kubernetes.io/projected/790b6a30-feee-4d83-9dca-ccc8116b0b8d-kube-api-access-44qtp\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687435 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687505 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687650 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2d3b1bf-96e4-4a85-9ddb-730b02687767-service-ca-bundle\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687760 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a040efc-7545-47b3-b66e-654b88099f0a-proxy-tls\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687886 4888 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.687920 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.687948 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.187922405 +0000 UTC m=+133.058952319 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688170 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-service-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688286 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688406 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsx8h\" (UniqueName: \"kubernetes.io/projected/420bd4d7-068c-4b38-b065-0d93466de36f-kube-api-access-tsx8h\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688508 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688616 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jqb4\" (UniqueName: 
\"kubernetes.io/projected/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-kube-api-access-5jqb4\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q9x9\" (UniqueName: \"kubernetes.io/projected/18fe9152-c196-4b59-bb0c-b01057148a0f-kube-api-access-2q9x9\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688754 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-service-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688843 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688896 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kthvg\" (UniqueName: \"kubernetes.io/projected/0a040efc-7545-47b3-b66e-654b88099f0a-kube-api-access-kthvg\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688921 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688952 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-registration-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.688984 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689008 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689056 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8rcc\" (UniqueName: \"kubernetes.io/projected/6962c849-519b-40a6-a785-9428962b8e3c-kube-api-access-q8rcc\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689081 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689102 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-plugins-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689123 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-csi-data-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689154 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689233 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfh9k\" (UniqueName: \"kubernetes.io/projected/50de192e-a3d8-4fc9-94c6-ed727a3bffc1-kube-api-access-gfh9k\") pod \"downloads-7954f5f757-gd6ps\" (UID: \"50de192e-a3d8-4fc9-94c6-ed727a3bffc1\") " pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689256 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-service-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689277 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" 
Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689251 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-registration-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689298 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrcgr\" (UniqueName: \"kubernetes.io/projected/7773020b-602c-4890-a3f5-944b5ba4a9a2-kube-api-access-wrcgr\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689332 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/35cbe699-623b-44fa-8402-ea41fd3b9a8e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689355 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2hxv\" (UniqueName: \"kubernetes.io/projected/4f9ca160-603d-4864-a4e2-e52192731771-kube-api-access-j2hxv\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689376 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689388 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-csi-data-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689398 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689423 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xk95\" (UniqueName: \"kubernetes.io/projected/35cbe699-623b-44fa-8402-ea41fd3b9a8e-kube-api-access-5xk95\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689446 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-metrics-certs\") 
pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689482 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-srv-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689505 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689529 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-mountpoint-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689540 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-plugins-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689552 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689576 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8x8s\" (UniqueName: \"kubernetes.io/projected/94df7fc9-9387-4d94-bc68-ba178504980e-kube-api-access-s8x8s\") pod \"migrator-59844c95c7-6mfmd\" (UID: \"94df7fc9-9387-4d94-bc68-ba178504980e\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689605 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689613 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689629 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mssl\" (UniqueName: \"kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl\") pod 
\"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689651 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr6pw\" (UniqueName: \"kubernetes.io/projected/b2d3b1bf-96e4-4a85-9ddb-730b02687767-kube-api-access-wr6pw\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689671 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-serving-cert\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689692 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-etcd-client\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689713 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t7wz\" (UniqueName: \"kubernetes.io/projected/eed00975-3972-4bc7-aac6-11986d1d5a74-kube-api-access-6t7wz\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689734 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-socket-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689757 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdwxc\" (UniqueName: \"kubernetes.io/projected/0e2c960f-af3d-4547-b034-c77d3598b887-kube-api-access-sdwxc\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689777 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689802 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 
19:35:32.689810 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689830 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-srv-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689856 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-serving-cert\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689877 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/420bd4d7-068c-4b38-b065-0d93466de36f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689898 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689938 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689963 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.689993 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690020 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt6lk\" (UniqueName: 
\"kubernetes.io/projected/500c01ff-5b97-4c18-9080-f714a39ba531-kube-api-access-jt6lk\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690041 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-config\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690063 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/420bd4d7-068c-4b38-b065-0d93466de36f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690083 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-service-ca-bundle\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690401 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-mountpoint-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690533 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0e2c960f-af3d-4547-b034-c77d3598b887-tmpfs\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690086 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0e2c960f-af3d-4547-b034-c77d3598b887-tmpfs\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690696 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690716 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690735 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690764 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690781 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690799 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-config\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690813 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690837 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-profile-collector-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690855 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690874 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdhrh\" (UniqueName: \"kubernetes.io/projected/66a22559-1ea1-4cf3-86d5-2672504a1968-kube-api-access-pdhrh\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.690892 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-images\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.691534 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-config\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.691568 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-config\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.691728 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/4f9ca160-603d-4864-a4e2-e52192731771-socket-dir\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.691838 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/420bd4d7-068c-4b38-b065-0d93466de36f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.693375 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-serving-cert\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.694310 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-serving-cert\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.694781 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6962c849-519b-40a6-a785-9428962b8e3c-etcd-client\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.695320 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/420bd4d7-068c-4b38-b065-0d93466de36f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.698164 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6962c849-519b-40a6-a785-9428962b8e3c-etcd-ca\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.710311 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.729054 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.741300 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.749809 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.769753 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.773857 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.792115 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.792670 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.29263767 +0000 UTC m=+133.163667794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.796341 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.805587 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.810865 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.813896 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.829885 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.844877 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.849215 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.854121 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.876220 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.882476 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.889769 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.893120 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.893221 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.393203206 +0000 UTC m=+133.264233120 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.893350 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:32 crc kubenswrapper[4888]: E1201 19:35:32.893599 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.393592427 +0000 UTC m=+133.264622341 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.910122 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.941268 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.949541 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.950095 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.951829 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.970553 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.972266 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.988796 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 19:35:32 crc kubenswrapper[4888]: I1201 19:35:32.993540 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.000631 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:33 
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.001898 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.500780953 +0000 UTC m=+133.371810877 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.002626 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.003050 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.503029437 +0000 UTC m=+133.374059561 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.010007 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.013312 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.030033 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.050371 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.069703 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.084308 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.089341 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.091761 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.103676 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.103839 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.603806025 +0000 UTC m=+133.474835969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.105075 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.105521 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.605507926 +0000 UTC m=+133.476537850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.111068 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.130126 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.151288 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.163910 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.171239 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.178634 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-config\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.190238 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.195498 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-srv-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.206395 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.206608 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.706564249 +0000 UTC m=+133.577594183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
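
[annotation] The nestedpendingoperations.go lines above implement per-volume retry bookkeeping: a failed operation is embargoed ("No retries permitted until ...") and the reconciler's next pass skips it until the deadline passes; here the embargo stays at the initial 500ms step. The same wait-and-retry pattern exists in library form in k8s.io/apimachinery. A toy sketch (not the kubelet's actual code) retrying a flaky operation with a 500ms initial delay and exponential growth:

    package main

    import (
    	"errors"
    	"fmt"
    	"time"

    	"k8s.io/apimachinery/pkg/util/wait"
    )

    func main() {
    	attempts := 0
    	// 500ms, then 1s, 2s, 4s, for up to 5 attempts; the 500ms initial step
    	// mirrors the durationBeforeRetry seen in the log.
    	backoff := wait.Backoff{Duration: 500 * time.Millisecond, Factor: 2.0, Steps: 5}

    	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
    		attempts++
    		fmt.Printf("attempt %d at %s\n", attempts, time.Now().Format(time.RFC3339Nano))
    		if attempts < 4 {
    			return false, nil // transient failure: retry after the next backoff step
    		}
    		return true, nil // success: stop retrying
    	})
    	if errors.Is(err, wait.ErrWaitTimeout) {
    		fmt.Println("gave up: backoff steps exhausted")
    	}
    }
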
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.206840 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.207206 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.707169087 +0000 UTC m=+133.578199001 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.209772 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.230166 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.248896 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.255144 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/18fe9152-c196-4b59-bb0c-b01057148a0f-profile-collector-cert\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.255224 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-profile-collector-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.255854 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.270119 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.288678 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.292700 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0a040efc-7545-47b3-b66e-654b88099f0a-images\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.308849 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.308980 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.808949842 +0000 UTC m=+133.679979756 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.309345 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.309751 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.809742736 +0000 UTC m=+133.680772650 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.310470 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.329680 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.343030 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0a040efc-7545-47b3-b66e-654b88099f0a-proxy-tls\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.349763 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.370661 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.390043 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.401986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-default-certificate\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.410750 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.411387 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.411644 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.911608373 +0000 UTC m=+133.782638287 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.412050 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.412771 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:33.912746897 +0000 UTC m=+133.783776851 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.421106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-stats-auth\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.427735 4888 request.go:700] Waited for 1.005958192s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/secrets?fieldSelector=metadata.name%3Drouter-metrics-certs-default&limit=500&resourceVersion=0
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.429439 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.433275 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b2d3b1bf-96e4-4a85-9ddb-730b02687767-metrics-certs\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.450253 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.458684 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b2d3b1bf-96e4-4a85-9ddb-730b02687767-service-ca-bundle\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq"
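
[annotation] The request.go:700 line above ("Waited for 1.005958192s due to client-side throttling, not priority and fairness") is client-go's own token-bucket rate limiter deferring a request, independent of server-side priority and fairness. The logged GET also shows how the kubelet fetches a single secret: a namespaced list with a metadata.name field selector. A sketch of both, assuming in-cluster credentials; the QPS/burst numbers are illustrative, not the kubelet's actual settings:

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    	"k8s.io/client-go/util/flowcontrol"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	// Token bucket: 5 requests/s steady state, bursts of 10. When the bucket
    	// is empty, calls block and client-go logs the "Waited for ..." message.
    	cfg.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(5, 10)
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	// Same shape as the logged request: list one secret by name.
    	start := time.Now()
    	secrets, err := cs.CoreV1().Secrets("openshift-ingress").List(context.TODO(), metav1.ListOptions{
    		FieldSelector: "metadata.name=router-metrics-certs-default",
    		Limit:         500,
    	})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Printf("got %d secret(s) in %s\n", len(secrets.Items), time.Since(start))
    }
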
pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.470146 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.489919 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.493746 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e31e84-31ed-4549-81b7-7409c1f34c08-srv-cert\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.510310 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.513973 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.514228 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.014169411 +0000 UTC m=+133.885199335 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.514880 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.515277 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.015262684 +0000 UTC m=+133.886292598 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.528961 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.549406 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.589252 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.610002 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.615400 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/35cbe699-623b-44fa-8402-ea41fd3b9a8e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.615682 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.615815 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.11578906 +0000 UTC m=+133.986818974 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.616268 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.616641 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.116629335 +0000 UTC m=+133.987659239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.629420 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.650198 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.669411 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.680604 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687745 4888 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687835 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config podName:f8d684ef-1323-46d4-aa9b-446ebdbd7d13 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.187813206 +0000 UTC m=+134.058843120 (durationBeforeRetry 500ms). 
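
[annotation] The burst of "failed to sync configmap cache / failed to sync secret cache: timed out waiting for the condition" errors that follows happens because the configmap and secret volume plugins read object data through informer caches, and a mount attempted before the corresponding cache has synced fails with exactly this message; the later "Caches populated" reflector lines show the caches catching up, after which the retries succeed. A minimal sketch of the same gated wait, assuming in-cluster credentials (the namespace is taken from the log; the 30s bound is illustrative):

    package main

    import (
    	"fmt"
    	"time"

    	"k8s.io/client-go/informers"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    	"k8s.io/client-go/tools/cache"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	// Namespace-scoped factory, mirroring the kubelet's per-namespace
    	// watches visible in the reflector lines above.
    	factory := informers.NewSharedInformerFactoryWithOptions(cs, 0,
    		informers.WithNamespace("openshift-kube-storage-version-migrator-operator"))
    	cmInformer := factory.Core().V1().ConfigMaps().Informer()

    	stop := make(chan struct{})
    	defer close(stop)
    	factory.Start(stop)

    	// Bound the wait; an expired channel makes WaitForCacheSync return
    	// false, the library's "timed out waiting for the condition".
    	deadline := make(chan struct{})
    	go func() { time.Sleep(30 * time.Second); close(deadline) }()
    	if !cache.WaitForCacheSync(deadline, cmInformer.HasSynced) {
    		fmt.Println("failed to sync configmap cache: timed out waiting for the condition")
    		return
    	}
    	fmt.Println("configmap cache synced")
    }
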
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config") pod "kube-storage-version-migrator-operator-b67b599dd-8zlm9" (UID: "f8d684ef-1323-46d4-aa9b-446ebdbd7d13") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687752 4888 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687905 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key podName:7773020b-602c-4890-a3f5-944b5ba4a9a2 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.187886088 +0000 UTC m=+134.058916022 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key") pod "service-ca-9c57cc56f-hpngg" (UID: "7773020b-602c-4890-a3f5-944b5ba4a9a2") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687752 4888 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687943 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert podName:500c01ff-5b97-4c18-9080-f714a39ba531 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.187935349 +0000 UTC m=+134.058965273 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert") pod "service-ca-operator-777779d784-hj2dp" (UID: "500c01ff-5b97-4c18-9080-f714a39ba531") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687982 4888 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.687983 4888 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.688008 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume podName:3ba0afc8-40bb-446f-be9d-4532fe287240 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.188000731 +0000 UTC m=+134.059030655 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume") pod "collect-profiles-29410290-4flfm" (UID: "3ba0afc8-40bb-446f-be9d-4532fe287240") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.688029 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config podName:500c01ff-5b97-4c18-9080-f714a39ba531 nodeName:}" failed. 
No retries permitted until 2025-12-01 19:35:34.188015362 +0000 UTC m=+134.059045386 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config") pod "service-ca-operator-777779d784-hj2dp" (UID: "500c01ff-5b97-4c18-9080-f714a39ba531") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.688031 4888 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.688057 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls podName:eed00975-3972-4bc7-aac6-11986d1d5a74 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.188049843 +0000 UTC m=+134.059079767 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls") pod "dns-default-7ggph" (UID: "eed00975-3972-4bc7-aac6-11986d1d5a74") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.688973 4888 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.689007 4888 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.689076 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca podName:1a9245f3-0247-4dd6-b4c8-0658f524bc1c nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.189065293 +0000 UTC m=+134.060095207 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca") pod "marketplace-operator-79b997595-t425v" (UID: "1a9245f3-0247-4dd6-b4c8-0658f524bc1c") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.689131 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume podName:eed00975-3972-4bc7-aac6-11986d1d5a74 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.189118435 +0000 UTC m=+134.060148339 (durationBeforeRetry 500ms). 
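
[annotation] For the kubernetes.io/secret and kubernetes.io/configmap volumes failing above, MountVolume.SetUp is, at its core, "fetch the object, project each data key as a file under the pod's volume directory." A loose sketch of that projection, assuming in-cluster credentials and a writable target directory (the namespace and secret name are taken from the log; the target path is illustrative); the kubelet's real implementation additionally writes atomically via a timestamped directory and symlink swap:

    package main

    import (
    	"context"
    	"os"
    	"path/filepath"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)

    	// Fetch the secret backing the volume; a miss here is the
    	// "Couldn't get secret ..." error class seen in the log.
    	sec, err := cs.CoreV1().Secrets("openshift-service-ca").Get(
    		context.TODO(), "signing-key", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	dir := "/tmp/signing-key-volume" // stand-in for the pod volume dir
    	if err := os.MkdirAll(dir, 0o700); err != nil {
    		panic(err)
    	}
    	for name, data := range sec.Data {
    		// One file per data key, as a secret volume presents it to the pod.
    		if err := os.WriteFile(filepath.Join(dir, name), data, 0o600); err != nil {
    			panic(err)
    		}
    	}
    }
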
Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume") pod "dns-default-7ggph" (UID: "eed00975-3972-4bc7-aac6-11986d1d5a74") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.689128 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690173 4888 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690312 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert podName:66a22559-1ea1-4cf3-86d5-2672504a1968 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.1902645 +0000 UTC m=+134.061294484 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert") pod "ingress-canary-vdkm7" (UID: "66a22559-1ea1-4cf3-86d5-2672504a1968") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690211 4888 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690409 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert podName:0e2c960f-af3d-4547-b034-c77d3598b887 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.190397204 +0000 UTC m=+134.061427218 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert") pod "packageserver-d55dfcdfc-7k5h6" (UID: "0e2c960f-af3d-4547-b034-c77d3598b887") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690693 4888 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.690738 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics podName:1a9245f3-0247-4dd6-b4c8-0658f524bc1c nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.190729004 +0000 UTC m=+134.061758918 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics") pod "marketplace-operator-79b997595-t425v" (UID: "1a9245f3-0247-4dd6-b4c8-0658f524bc1c") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691590 4888 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691609 4888 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691634 4888 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691643 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token podName:929ef8f1-4b63-4c81-b892-6687da60b7f7 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.191631011 +0000 UTC m=+134.062661005 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token") pod "machine-config-server-b9tj2" (UID: "929ef8f1-4b63-4c81-b892-6687da60b7f7") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691659 4888 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691661 4888 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691664 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs podName:929ef8f1-4b63-4c81-b892-6687da60b7f7 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.191655852 +0000 UTC m=+134.062685886 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs") pod "machine-config-server-b9tj2" (UID: "929ef8f1-4b63-4c81-b892-6687da60b7f7") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691711 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle podName:7773020b-602c-4890-a3f5-944b5ba4a9a2 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.191701653 +0000 UTC m=+134.062731567 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle") pod "service-ca-9c57cc56f-hpngg" (UID: "7773020b-602c-4890-a3f5-944b5ba4a9a2") : failed to sync configmap cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691722 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert podName:0e2c960f-af3d-4547-b034-c77d3598b887 nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.191717064 +0000 UTC m=+134.062746978 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert") pod "packageserver-d55dfcdfc-7k5h6" (UID: "0e2c960f-af3d-4547-b034-c77d3598b887") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.691738 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert podName:790b6a30-feee-4d83-9dca-ccc8116b0b8d nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.191729024 +0000 UTC m=+134.062758938 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-rg7sz" (UID: "790b6a30-feee-4d83-9dca-ccc8116b0b8d") : failed to sync secret cache: timed out waiting for the condition Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.708888 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.716958 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.717058 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.217043419 +0000 UTC m=+134.088073323 (durationBeforeRetry 500ms). 
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.717302 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.718105 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.21809754 +0000 UTC m=+134.089127454 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.729811 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.749558 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.768822 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.789096 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.808674 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.818604 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.818750 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.31873218 +0000 UTC m=+134.189762084 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.819062 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.819595 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.319576076 +0000 UTC m=+134.190606100 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.829321 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.849898 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.869750 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.889689 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.910145 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.920084 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.920389 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.42035834 +0000 UTC m=+134.291388254 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.920830 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:33 crc kubenswrapper[4888]: E1201 19:35:33.921172 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.421160314 +0000 UTC m=+134.292190228 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.930326 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.949453 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.969428 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 01 19:35:33 crc kubenswrapper[4888]: I1201 19:35:33.988530 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.009485 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.021922 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.022137 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.522099593 +0000 UTC m=+134.393129547 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.022590 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.023101 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.523079423 +0000 UTC m=+134.394109427 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.029076 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.049497 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.069122 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.090115 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.109968 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.123732 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.123925 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.623906129 +0000 UTC m=+134.494936063 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.124237 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.124526 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.624513317 +0000 UTC m=+134.495543231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.129270 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.148931 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.169904 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.190323 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.210802 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.225495 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.225779 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.725745335 +0000 UTC m=+134.596775289 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.225850 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.225919 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226035 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226141 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226269 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226342 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226431 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226703 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"cert\" (UniqueName: \"kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226911 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.226992 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227087 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.227167 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.727139437 +0000 UTC m=+134.598169461 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227363 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227471 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227542 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227579 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227627 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227752 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227787 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.227900 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/500c01ff-5b97-4c18-9080-f714a39ba531-config\") pod 
\"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.229130 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.229146 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.228979 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eed00975-3972-4bc7-aac6-11986d1d5a74-config-volume\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.230507 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-cabundle\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.233013 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7773020b-602c-4890-a3f5-944b5ba4a9a2-signing-key\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.233319 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/500c01ff-5b97-4c18-9080-f714a39ba531-serving-cert\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.233810 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/790b6a30-feee-4d83-9dca-ccc8116b0b8d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.233840 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-webhook-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.234774 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0e2c960f-af3d-4547-b034-c77d3598b887-apiservice-cert\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.240914 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66a22559-1ea1-4cf3-86d5-2672504a1968-cert\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.245504 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eed00975-3972-4bc7-aac6-11986d1d5a74-metrics-tls\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.256931 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.258786 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.269204 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.291757 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.309468 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.320578 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.328794 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.328954 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.828932142 +0000 UTC m=+134.699962056 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.329357 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.329997 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.829985674 +0000 UTC m=+134.701015588 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.347778 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trjqk\" (UniqueName: \"kubernetes.io/projected/f17546a6-7348-4a92-8ff1-1e02eb4bb02f-kube-api-access-trjqk\") pod \"console-operator-58897d9998-s8fd6\" (UID: \"f17546a6-7348-4a92-8ff1-1e02eb4bb02f\") " pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.366851 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x25vz\" (UniqueName: \"kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz\") pod \"controller-manager-879f6c89f-8tfrf\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.384887 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz8k6\" (UniqueName: \"kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6\") pod \"console-f9d7485db-bt5fw\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.403649 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4thtq\" (UniqueName: \"kubernetes.io/projected/f325c412-68ff-4735-a4fb-c5d2183d0401-kube-api-access-4thtq\") pod \"apiserver-76f77b778f-l4k97\" (UID: \"f325c412-68ff-4735-a4fb-c5d2183d0401\") " pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.423882 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvscn\" (UniqueName: 
\"kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn\") pod \"route-controller-manager-6576b87f9c-gdgdb\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.431306 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.431476 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.931453928 +0000 UTC m=+134.802483852 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.431856 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.432405 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:34.932396846 +0000 UTC m=+134.803426750 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.436494 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.447288 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.447898 4888 request.go:700] Waited for 1.969382359s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.462560 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6j2x\" (UniqueName: \"kubernetes.io/projected/d43d2671-5cdc-4203-9956-1de5f480a3cc-kube-api-access-f6j2x\") pod \"cluster-image-registry-operator-dc59b4c8b-6rj7j\" (UID: \"d43d2671-5cdc-4203-9956-1de5f480a3cc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.470056 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.489945 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.500986 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.502914 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-certs\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.509664 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.514374 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.574545 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s8fd6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.575514 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.576337 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.076305233 +0000 UTC m=+134.947335147 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.580117 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/929ef8f1-4b63-4c81-b892-6687da60b7f7-node-bootstrap-token\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.581530 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.581881 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.584223 4888 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.613468 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvdmp\" (UniqueName: \"kubernetes.io/projected/0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb-kube-api-access-qvdmp\") pod \"apiserver-7bbb656c7d-skz8b\" (UID: \"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.620823 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.632369 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4ft4\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.644829 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.667267 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmgwb\" (UniqueName: \"kubernetes.io/projected/5caa088d-82b7-45f9-b540-57fa54882521-kube-api-access-wmgwb\") pod \"dns-operator-744455d44c-r9km8\" (UID: \"5caa088d-82b7-45f9-b540-57fa54882521\") " pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.678641 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.679232 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.179218852 +0000 UTC m=+135.050248756 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.682401 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.688267 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmldm\" (UniqueName: \"kubernetes.io/projected/7671338d-c43b-437d-853a-3759b0a10d95-kube-api-access-xmldm\") pod \"openshift-apiserver-operator-796bbdcf4f-lkr2s\" (UID: \"7671338d-c43b-437d-853a-3759b0a10d95\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:34 crc kubenswrapper[4888]: W1201 19:35:34.694197 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaeb4ebff_81e9_4beb_b7fa_a800849d16ff.slice/crio-6af95cb375bf3f61f170ff5fa1b29f62d24cd60f4ad54149a8c79d5d9e42f1e7 WatchSource:0}: Error finding container 6af95cb375bf3f61f170ff5fa1b29f62d24cd60f4ad54149a8c79d5d9e42f1e7: Status 404 returned error can't find the container with id 6af95cb375bf3f61f170ff5fa1b29f62d24cd60f4ad54149a8c79d5d9e42f1e7 Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.711024 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xmq8\" (UniqueName: \"kubernetes.io/projected/0105ed18-666e-4e81-aaf5-8e63ba162602-kube-api-access-5xmq8\") pod \"cluster-samples-operator-665b6dd947-b74fz\" (UID: \"0105ed18-666e-4e81-aaf5-8e63ba162602\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.716433 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.730003 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdbx9\" (UniqueName: \"kubernetes.io/projected/50b1e183-9a9a-4daa-a769-78bc53d20c41-kube-api-access-bdbx9\") pod \"control-plane-machine-set-operator-78cbb6b69f-42k5p\" (UID: \"50b1e183-9a9a-4daa-a769-78bc53d20c41\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.743336 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhh85\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-kube-api-access-dhh85\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.762666 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.763032 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b57ff58-42f5-48ae-8637-aa879cf54dc5-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lmqwn\" (UID: \"3b57ff58-42f5-48ae-8637-aa879cf54dc5\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.779780 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.779971 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.780079 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.280060898 +0000 UTC m=+135.151090812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.780325 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.780789 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.280777739 +0000 UTC m=+135.151807653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.785678 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.786572 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr2mp\" (UniqueName: \"kubernetes.io/projected/60a42fba-5e64-4a68-a9a3-e29ff836d97f-kube-api-access-qr2mp\") pod \"machine-api-operator-5694c8668f-2w272\" (UID: \"60a42fba-5e64-4a68-a9a3-e29ff836d97f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:34 crc kubenswrapper[4888]: W1201 19:35:34.793490 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod340ce8b7_3ad7_46fb_a45e_b70641e8661c.slice/crio-ef6cfe4c993ff4026bf2a04a84b039f901c3071742164896768bdf5579d3c6a1 WatchSource:0}: Error finding container ef6cfe4c993ff4026bf2a04a84b039f901c3071742164896768bdf5579d3c6a1: Status 404 returned error can't find the container with id ef6cfe4c993ff4026bf2a04a84b039f901c3071742164896768bdf5579d3c6a1 Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.804898 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a816c87b-de2f-4a92-a981-3808e168b282-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rmlz\" (UID: \"a816c87b-de2f-4a92-a981-3808e168b282\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.810133 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.829510 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhrtx\" (UniqueName: \"kubernetes.io/projected/0022a5ef-fe10-4344-824c-75462ac971c5-kube-api-access-lhrtx\") pod \"machine-approver-56656f9798-zjc7f\" (UID: \"0022a5ef-fe10-4344-824c-75462ac971c5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.838955 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.846426 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s8fd6"] Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.854434 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gncfc\" (UniqueName: \"kubernetes.io/projected/5f780c25-3451-47ee-9c35-60d649683350-kube-api-access-gncfc\") pod \"machine-config-controller-84d6567774-tqvr6\" (UID: \"5f780c25-3451-47ee-9c35-60d649683350\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.863723 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rgt8\" (UniqueName: \"kubernetes.io/projected/86a0fb84-d4de-443a-86fb-273acb4138d0-kube-api-access-6rgt8\") pod \"openshift-config-operator-7777fb866f-p8s4h\" (UID: \"86a0fb84-d4de-443a-86fb-273acb4138d0\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.875673 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.878549 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-l4k97"] Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.881936 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.883098 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.382486831 +0000 UTC m=+135.253516755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.883500 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.883858 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.383835742 +0000 UTC m=+135.254865676 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.906488 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jh7x\" (UniqueName: \"kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x\") pod \"oauth-openshift-558db77b4-5ksc9\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.927055 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82cd8\" (UniqueName: \"kubernetes.io/projected/a8e31e84-31ed-4549-81b7-7409c1f34c08-kube-api-access-82cd8\") pod \"catalog-operator-68c6474976-mck5b\" (UID: \"a8e31e84-31ed-4549-81b7-7409c1f34c08\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.929832 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.943753 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.946589 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-266nn\" (UniqueName: \"kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn\") pod \"collect-profiles-29410290-4flfm\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.950402 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s"] Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.950811 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.957004 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.964107 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.964451 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44qtp\" (UniqueName: \"kubernetes.io/projected/790b6a30-feee-4d83-9dca-ccc8116b0b8d-kube-api-access-44qtp\") pod \"package-server-manager-789f6589d5-rg7sz\" (UID: \"790b6a30-feee-4d83-9dca-ccc8116b0b8d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.984224 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.984858 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.985029 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.484999108 +0000 UTC m=+135.356029022 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.985242 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:34 crc kubenswrapper[4888]: E1201 19:35:34.985601 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.485593156 +0000 UTC m=+135.356623070 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:34 crc kubenswrapper[4888]: I1201 19:35:34.989580 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l4b2\" (UniqueName: \"kubernetes.io/projected/6cb31fa6-3d13-4ccb-98c6-2a56fd37e279-kube-api-access-9l4b2\") pod \"authentication-operator-69f744f599-7gg6f\" (UID: \"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.005859 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tmhj\" (UniqueName: \"kubernetes.io/projected/929ef8f1-4b63-4c81-b892-6687da60b7f7-kube-api-access-2tmhj\") pod \"machine-config-server-b9tj2\" (UID: \"929ef8f1-4b63-4c81-b892-6687da60b7f7\") " pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.025172 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsx8h\" (UniqueName: \"kubernetes.io/projected/420bd4d7-068c-4b38-b065-0d93466de36f-kube-api-access-tsx8h\") pod \"openshift-controller-manager-operator-756b6f6bc6-d644g\" (UID: \"420bd4d7-068c-4b38-b065-0d93466de36f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.035036 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.040501 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jqb4\" (UniqueName: \"kubernetes.io/projected/f8d684ef-1323-46d4-aa9b-446ebdbd7d13-kube-api-access-5jqb4\") pod \"kube-storage-version-migrator-operator-b67b599dd-8zlm9\" (UID: \"f8d684ef-1323-46d4-aa9b-446ebdbd7d13\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.053030 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.058470 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.063241 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" event={"ID":"340ce8b7-3ad7-46fb-a45e-b70641e8661c","Type":"ContainerStarted","Data":"ef6cfe4c993ff4026bf2a04a84b039f901c3071742164896768bdf5579d3c6a1"} Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.066685 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q9x9\" (UniqueName: \"kubernetes.io/projected/18fe9152-c196-4b59-bb0c-b01057148a0f-kube-api-access-2q9x9\") pod \"olm-operator-6b444d44fb-m99gv\" (UID: \"18fe9152-c196-4b59-bb0c-b01057148a0f\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.066686 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" event={"ID":"7671338d-c43b-437d-853a-3759b0a10d95","Type":"ContainerStarted","Data":"5cae361ad8d5c1e7cc7d70d3334f527aca42b92874129c5ba6f33c55694a0dc6"} Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.071396 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" event={"ID":"aeb4ebff-81e9-4beb-b7fa-a800849d16ff","Type":"ContainerStarted","Data":"6af95cb375bf3f61f170ff5fa1b29f62d24cd60f4ad54149a8c79d5d9e42f1e7"} Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.072742 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s8fd6" event={"ID":"f17546a6-7348-4a92-8ff1-1e02eb4bb02f","Type":"ContainerStarted","Data":"dfef0fd6f69697a70df621db1c9e5ecbbb5134703fa3a9ba223d5eb913c2e98b"} Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.073899 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bt5fw" event={"ID":"9454739e-41f7-48f2-a9ad-8194e0a18251","Type":"ContainerStarted","Data":"78ff0a2027078e7fa0e124c54c499ae1ce20749d09981bec346c6f1aedfec53d"} Dec 01 19:35:35 crc kubenswrapper[4888]: W1201 19:35:35.078301 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e9db052_f4a6_4bf5_9dd7_5ea6ee940deb.slice/crio-a7647dbfae91696b70bc183cbeff907f0226fea0a3f86dac6ba973b77eb9955b WatchSource:0}: Error finding container a7647dbfae91696b70bc183cbeff907f0226fea0a3f86dac6ba973b77eb9955b: Status 404 returned error can't find the container with id a7647dbfae91696b70bc183cbeff907f0226fea0a3f86dac6ba973b77eb9955b Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.083159 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.085980 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.086347 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.586327529 +0000 UTC m=+135.457357433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.086469 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.086881 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kthvg\" (UniqueName: \"kubernetes.io/projected/0a040efc-7545-47b3-b66e-654b88099f0a-kube-api-access-kthvg\") pod \"machine-config-operator-74547568cd-tj6dq\" (UID: \"0a040efc-7545-47b3-b66e-654b88099f0a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.086972 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.586955848 +0000 UTC m=+135.457985762 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.106276 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.106866 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8rcc\" (UniqueName: \"kubernetes.io/projected/6962c849-519b-40a6-a785-9428962b8e3c-kube-api-access-q8rcc\") pod \"etcd-operator-b45778765-g4ddt\" (UID: \"6962c849-519b-40a6-a785-9428962b8e3c\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.123262 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.125838 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2hxv\" (UniqueName: \"kubernetes.io/projected/4f9ca160-603d-4864-a4e2-e52192731771-kube-api-access-j2hxv\") pod \"csi-hostpathplugin-9zw95\" (UID: \"4f9ca160-603d-4864-a4e2-e52192731771\") " pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.151108 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfh9k\" (UniqueName: \"kubernetes.io/projected/50de192e-a3d8-4fc9-94c6-ed727a3bffc1-kube-api-access-gfh9k\") pod \"downloads-7954f5f757-gd6ps\" (UID: \"50de192e-a3d8-4fc9-94c6-ed727a3bffc1\") " pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.160825 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.172084 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xk95\" (UniqueName: \"kubernetes.io/projected/35cbe699-623b-44fa-8402-ea41fd3b9a8e-kube-api-access-5xk95\") pod \"multus-admission-controller-857f4d67dd-mdtrk\" (UID: \"35cbe699-623b-44fa-8402-ea41fd3b9a8e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.185495 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5sqgg\" (UID: \"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.187449 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.187612 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.687592967 +0000 UTC m=+135.558622881 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.187727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.188131 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.688117433 +0000 UTC m=+135.559147347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.194125 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-b9tj2" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.205108 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.210875 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8x8s\" (UniqueName: \"kubernetes.io/projected/94df7fc9-9387-4d94-bc68-ba178504980e-kube-api-access-s8x8s\") pod \"migrator-59844c95c7-6mfmd\" (UID: \"94df7fc9-9387-4d94-bc68-ba178504980e\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.213557 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.229985 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mssl\" (UniqueName: \"kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl\") pod \"marketplace-operator-79b997595-t425v\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.246168 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrcgr\" (UniqueName: \"kubernetes.io/projected/7773020b-602c-4890-a3f5-944b5ba4a9a2-kube-api-access-wrcgr\") pod \"service-ca-9c57cc56f-hpngg\" (UID: \"7773020b-602c-4890-a3f5-944b5ba4a9a2\") " pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.270840 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr6pw\" (UniqueName: \"kubernetes.io/projected/b2d3b1bf-96e4-4a85-9ddb-730b02687767-kube-api-access-wr6pw\") pod \"router-default-5444994796-wzxcq\" (UID: \"b2d3b1bf-96e4-4a85-9ddb-730b02687767\") " pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.271634 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.280759 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.285181 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt6lk\" (UniqueName: \"kubernetes.io/projected/500c01ff-5b97-4c18-9080-f714a39ba531-kube-api-access-jt6lk\") pod \"service-ca-operator-777779d784-hj2dp\" (UID: \"500c01ff-5b97-4c18-9080-f714a39ba531\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.289468 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.290443 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.790410333 +0000 UTC m=+135.661440247 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.296741 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r9km8"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.318632 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdwxc\" (UniqueName: \"kubernetes.io/projected/0e2c960f-af3d-4547-b034-c77d3598b887-kube-api-access-sdwxc\") pod \"packageserver-d55dfcdfc-7k5h6\" (UID: \"0e2c960f-af3d-4547-b034-c77d3598b887\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.319888 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.325979 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.338491 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2w272"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.346011 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fac68ebb-66d0-4ffa-b6a0-61f708fe70db-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rl64v\" (UID: \"fac68ebb-66d0-4ffa-b6a0-61f708fe70db\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.352163 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdhrh\" (UniqueName: \"kubernetes.io/projected/66a22559-1ea1-4cf3-86d5-2672504a1968-kube-api-access-pdhrh\") pod \"ingress-canary-vdkm7\" (UID: \"66a22559-1ea1-4cf3-86d5-2672504a1968\") " pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.352876 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.360769 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.369778 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.369825 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t7wz\" (UniqueName: \"kubernetes.io/projected/eed00975-3972-4bc7-aac6-11986d1d5a74-kube-api-access-6t7wz\") pod \"dns-default-7ggph\" (UID: \"eed00975-3972-4bc7-aac6-11986d1d5a74\") " pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.376177 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.391152 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.396776 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.398622 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:35.898597641 +0000 UTC m=+135.769627765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.399571 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.444288 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.453423 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vdkm7" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.466975 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.474585 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-7ggph" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.484073 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.489062 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.504013 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.504391 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.004371996 +0000 UTC m=+135.875401910 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.504452 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.505138 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.005113118 +0000 UTC m=+135.876143032 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.608134 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.608953 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.610024 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.610939 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.611098 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.111081459 +0000 UTC m=+135.982111373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.611374 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.611804 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.111784751 +0000 UTC m=+135.982814725 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.660042 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.723872 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h"] Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.732248 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.732644 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.232626521 +0000 UTC m=+136.103656435 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.833686 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.834257 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.33424017 +0000 UTC m=+136.205270084 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.936905 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:35 crc kubenswrapper[4888]: E1201 19:35:35.937247 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.437229951 +0000 UTC m=+136.308259865 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:35 crc kubenswrapper[4888]: I1201 19:35:35.983362 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p"] Dec 01 19:35:35 crc kubenswrapper[4888]: W1201 19:35:35.992901 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda816c87b_de2f_4a92_a981_3808e168b282.slice/crio-ff13b8d9674bee12bf61de3571794766ee7d10e99e55ba8dc1d2dd58f43ff5b0 WatchSource:0}: Error finding container ff13b8d9674bee12bf61de3571794766ee7d10e99e55ba8dc1d2dd58f43ff5b0: Status 404 returned error can't find the container with id ff13b8d9674bee12bf61de3571794766ee7d10e99e55ba8dc1d2dd58f43ff5b0 Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.040235 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6"] Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.041491 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.041900 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.541885882 +0000 UTC m=+136.412915796 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.091117 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.143055 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.143323 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.643293285 +0000 UTC m=+136.514323199 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.146142 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-b9tj2" event={"ID":"929ef8f1-4b63-4c81-b892-6687da60b7f7","Type":"ContainerStarted","Data":"a1e7d1657898956682c5ceebf969a3ea488aac97394775c7369f79b379d32501"} Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.155995 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bt5fw" event={"ID":"9454739e-41f7-48f2-a9ad-8194e0a18251","Type":"ContainerStarted","Data":"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486"} Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.157709 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" event={"ID":"0022a5ef-fe10-4344-824c-75462ac971c5","Type":"ContainerStarted","Data":"05b19ff6cb31ae93d7650756e46cb7781b1b040869685d170fb4fc8d67dbecbd"} Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.157738 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" event={"ID":"0022a5ef-fe10-4344-824c-75462ac971c5","Type":"ContainerStarted","Data":"4aea146e25c2a89b839e8808cad7b2eea39cdc5f9f86bf20b558774c870854d6"} Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.160949 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" event={"ID":"5caa088d-82b7-45f9-b540-57fa54882521","Type":"ContainerStarted","Data":"375c2b967c5520896e9d3b3533404efdd49ec7f240358503d8650ed76852d72a"} 
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.170320 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" event={"ID":"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb","Type":"ContainerStarted","Data":"a7647dbfae91696b70bc183cbeff907f0226fea0a3f86dac6ba973b77eb9955b"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.177115 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.177160 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.177171 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" event={"ID":"340ce8b7-3ad7-46fb-a45e-b70641e8661c","Type":"ContainerStarted","Data":"3bcf5dd215143992669d730a4d02125bc2c89920d85bd07920e7402ac6bafe0b"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.177539 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.183674 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wzxcq" event={"ID":"b2d3b1bf-96e4-4a85-9ddb-730b02687767","Type":"ContainerStarted","Data":"3986a2af4f7bb427e7c5918bf87164d58a4a87796b9d4d3506d79cbfc0c5b773"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.194138 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.194761 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" event={"ID":"7671338d-c43b-437d-853a-3759b0a10d95","Type":"ContainerStarted","Data":"c239948e52a38b1bfd48999f8fa68d4e1d1d996af24f6611976b800c3e867563"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.197104 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" event={"ID":"50b1e183-9a9a-4daa-a769-78bc53d20c41","Type":"ContainerStarted","Data":"8a2dd9d3b44f621d47f61e1952e3f808aeb525c1b9021fa340a0ff5b4b1add97"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.201949 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" event={"ID":"60a42fba-5e64-4a68-a9a3-e29ff836d97f","Type":"ContainerStarted","Data":"ad7bb9a5046fc74be00e14daf304b8a807e3f2d11683b5697dc597f05a8308d1"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.209851 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" event={"ID":"3b57ff58-42f5-48ae-8637-aa879cf54dc5","Type":"ContainerStarted","Data":"c554cb0803d9083d0e6214664de920f92adab2b5dc0b26878b357a7734745901"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.229045 4888 generic.go:334] "Generic (PLEG): container finished" podID="f325c412-68ff-4735-a4fb-c5d2183d0401" containerID="5835a712e9142a922d3400c8c088819f4459695add07d074170f6c8f9d8aafca" exitCode=0
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.229262 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" event={"ID":"f325c412-68ff-4735-a4fb-c5d2183d0401","Type":"ContainerDied","Data":"5835a712e9142a922d3400c8c088819f4459695add07d074170f6c8f9d8aafca"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.229322 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" event={"ID":"f325c412-68ff-4735-a4fb-c5d2183d0401","Type":"ContainerStarted","Data":"b462d4048eedec9b908894c000d24e34109bfe77adfafa0045cca5df055624da"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.235803 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.240600 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" event={"ID":"86a0fb84-d4de-443a-86fb-273acb4138d0","Type":"ContainerStarted","Data":"90373b525708c21c7f39d611b314777ce038a237d0bae0b17f33296af65aee05"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.244416 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gd6ps"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.245404 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.248629 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" event={"ID":"d43d2671-5cdc-4203-9956-1de5f480a3cc","Type":"ContainerStarted","Data":"5ff40bca928180d62d4d6aed86816bce5b32408257f97afb3fcf5c9edd3c5cb5"}
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.249901 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.749877715 +0000 UTC m=+136.620907629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.270018 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g4ddt"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.278868 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" event={"ID":"a816c87b-de2f-4a92-a981-3808e168b282","Type":"ContainerStarted","Data":"ff13b8d9674bee12bf61de3571794766ee7d10e99e55ba8dc1d2dd58f43ff5b0"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.309851 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s8fd6" event={"ID":"f17546a6-7348-4a92-8ff1-1e02eb4bb02f","Type":"ContainerStarted","Data":"88e59dae48a41eff574bc89524974b00479b0237982671a0ef81dc990b48d910"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.309926 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s8fd6"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.322561 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" event={"ID":"0105ed18-666e-4e81-aaf5-8e63ba162602","Type":"ContainerStarted","Data":"88009595afda5f4b16c52222bbb273e77c2c4b9e039d25fab1b6f32351b385f4"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.353502 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.353930 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.853913827 +0000 UTC m=+136.724943741 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.356346 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.356416 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" event={"ID":"aeb4ebff-81e9-4beb-b7fa-a800849d16ff","Type":"ContainerStarted","Data":"7c345aa26cae54dfa74cbc8c0a15899088eb0fa129f391db263f14737c6fb5e4"}
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.358204 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.365360 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9zw95"]
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.367834 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.371987 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"]
Dec 01 19:35:36 crc kubenswrapper[4888]: W1201 19:35:36.417659 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ba0afc8_40bb_446f_be9d_4532fe287240.slice/crio-7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745 WatchSource:0}: Error finding container 7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745: Status 404 returned error can't find the container with id 7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.423445 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7gg6f"]
Dec 01 19:35:36 crc kubenswrapper[4888]: W1201 19:35:36.430352 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod790b6a30_feee_4d83_9dca_ccc8116b0b8d.slice/crio-fde2ad7248d1e632eebd2d437fca2170d22b3a80d55f5cd8572b1191f792eeee WatchSource:0}: Error finding container fde2ad7248d1e632eebd2d437fca2170d22b3a80d55f5cd8572b1191f792eeee: Status 404 returned error can't find the container with id fde2ad7248d1e632eebd2d437fca2170d22b3a80d55f5cd8572b1191f792eeee
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.454957 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.456955 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:36.956940209 +0000 UTC m=+136.827970123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.560500 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.560615 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.06059821 +0000 UTC m=+136.931628114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.560921 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.561237 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.06122967 +0000 UTC m=+136.932259584 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.663927 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.664094 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.164066696 +0000 UTC m=+137.035096610 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.664277 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.664606 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.164592242 +0000 UTC m=+137.035622156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.765113 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.767367 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.267320025 +0000 UTC m=+137.138349939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.773895 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.776211 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.276195403 +0000 UTC m=+137.147225317 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.875012 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.875302 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.375288256 +0000 UTC m=+137.246318170 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.964114 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s8fd6"
Dec 01 19:35:36 crc kubenswrapper[4888]: I1201 19:35:36.977765 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:36 crc kubenswrapper[4888]: E1201 19:35:36.978116 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.478103882 +0000 UTC m=+137.349133796 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.080952 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.081884 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.581867216 +0000 UTC m=+137.452897130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.152961 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-lkr2s" podStartSLOduration=118.152930423 podStartE2EDuration="1m58.152930423s" podCreationTimestamp="2025-12-01 19:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.141497147 +0000 UTC m=+137.012527061" watchObservedRunningTime="2025-12-01 19:35:37.152930423 +0000 UTC m=+137.023960357"
Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.153334 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" podStartSLOduration=117.153329315 podStartE2EDuration="1m57.153329315s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.103926602 +0000 UTC m=+136.974956516" watchObservedRunningTime="2025-12-01 19:35:37.153329315 +0000 UTC m=+137.024359229"
Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.185855 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.186530 4888 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.686513357 +0000 UTC m=+137.557543261 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.224019 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" podStartSLOduration=117.224005239 podStartE2EDuration="1m57.224005239s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.221020809 +0000 UTC m=+137.092050723" watchObservedRunningTime="2025-12-01 19:35:37.224005239 +0000 UTC m=+137.095035153" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.287555 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.287945 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.78791635 +0000 UTC m=+137.658946254 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.363798 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s8fd6" podStartSLOduration=117.363777181 podStartE2EDuration="1m57.363777181s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.363502863 +0000 UTC m=+137.234532777" watchObservedRunningTime="2025-12-01 19:35:37.363777181 +0000 UTC m=+137.234807095" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.366015 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-bt5fw" podStartSLOduration=117.366005639 podStartE2EDuration="1m57.366005639s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.324786154 +0000 UTC m=+137.195816098" watchObservedRunningTime="2025-12-01 19:35:37.366005639 +0000 UTC m=+137.237035553" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.382272 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" event={"ID":"5f780c25-3451-47ee-9c35-60d649683350","Type":"ContainerStarted","Data":"46ef52fc27d2cf2ebe1e385beb71be47682cfe290fe6a885878ac8ab0d8bea8d"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.397869 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.398294 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:37.898283144 +0000 UTC m=+137.769313058 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.429443 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" event={"ID":"a8e31e84-31ed-4549-81b7-7409c1f34c08","Type":"ContainerStarted","Data":"7b73c98b7c851dd2cf76b2b206e5ecc4be1004dc4dabea9eaa629892b6911b0f"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.446441 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" podStartSLOduration=117.446421778 podStartE2EDuration="1m57.446421778s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.430456896 +0000 UTC m=+137.301486810" watchObservedRunningTime="2025-12-01 19:35:37.446421778 +0000 UTC m=+137.317451682" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.447650 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.448564 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" event={"ID":"790b6a30-feee-4d83-9dca-ccc8116b0b8d","Type":"ContainerStarted","Data":"fde2ad7248d1e632eebd2d437fca2170d22b3a80d55f5cd8572b1191f792eeee"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.471483 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vdkm7"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.473890 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" event={"ID":"60a42fba-5e64-4a68-a9a3-e29ff836d97f","Type":"ContainerStarted","Data":"98466484b72953993cc64fc23e6e9775b42a132306bd198faa2dda480c48d857"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.484739 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" event={"ID":"6962c849-519b-40a6-a785-9428962b8e3c","Type":"ContainerStarted","Data":"e18c1e439d0b8fc4b089d2cf77fc93c275dd83ec2460ed6798252757923eb8eb"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.498653 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.499110 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-01 19:35:37.999084749 +0000 UTC m=+137.870114663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.512521 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gd6ps" event={"ID":"50de192e-a3d8-4fc9-94c6-ed727a3bffc1","Type":"ContainerStarted","Data":"2fcb3b53b8c36bd83312b6c18f4f5e08097e020d4d10932bd71546aa3844955e"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.527305 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" event={"ID":"f8d684ef-1323-46d4-aa9b-446ebdbd7d13","Type":"ContainerStarted","Data":"f6ebc578ff0956fc257082df8ff6b114dc563ed9cab61955421b0d3e23acf50b"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.551297 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.556247 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wzxcq" event={"ID":"b2d3b1bf-96e4-4a85-9ddb-730b02687767","Type":"ContainerStarted","Data":"1d83c761fdf124588b00c18df07bbee50e1e87f77f8a22ef634ee0668ec6a3cf"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.559593 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6rj7j" event={"ID":"d43d2671-5cdc-4203-9956-1de5f480a3cc","Type":"ContainerStarted","Data":"70e8b5bb04dd2785becefae70c79776199683aec3ed03d066ea9fe38ffa6665a"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.564643 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" event={"ID":"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279","Type":"ContainerStarted","Data":"d7f6583361ec4baeadcf8825ad4434d8ad0e7c46448bfd8e9f625b6927e8a443"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.580412 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-b9tj2" event={"ID":"929ef8f1-4b63-4c81-b892-6687da60b7f7","Type":"ContainerStarted","Data":"5ac23b53b1f9c1ccf83b952c6393ede645230e5e0c096b7060309985a3ba95b7"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.583242 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" event={"ID":"456a034d-a3c3-4cae-b0a1-4f5d0569ec08","Type":"ContainerStarted","Data":"ba7c3d89b0c1d95b7fdb2ae64b46e200b780fe899c08e5a1031df631a4bdf44b"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.592515 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mdtrk"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.596830 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" 
event={"ID":"0022a5ef-fe10-4344-824c-75462ac971c5","Type":"ContainerStarted","Data":"9e5cf2d43063f201f8763aac984809d63abb0775fedbc5bf37ac3c80c530ce3f"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.598727 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" event={"ID":"3b57ff58-42f5-48ae-8637-aa879cf54dc5","Type":"ContainerStarted","Data":"9aad7545b5515b77dea504a7f0962d7c0752bfa1e75389b021d5fff525f888ec"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.599808 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.600563 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.100551153 +0000 UTC m=+137.971581067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.601615 4888 generic.go:334] "Generic (PLEG): container finished" podID="0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb" containerID="4e316cddf041b344b2bdfb966901b66ee86434a7582ff90ece2d70cc9ff6d7b3" exitCode=0 Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.601671 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" event={"ID":"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb","Type":"ContainerDied","Data":"4e316cddf041b344b2bdfb966901b66ee86434a7582ff90ece2d70cc9ff6d7b3"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.616630 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" event={"ID":"420bd4d7-068c-4b38-b065-0d93466de36f","Type":"ContainerStarted","Data":"f15febe758ac486bdf5f38d18f13a9bb97ee8e412e42fa96330e0e4d3abbd407"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.622466 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-wzxcq" podStartSLOduration=117.622446825 podStartE2EDuration="1m57.622446825s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.619423834 +0000 UTC m=+137.490453748" watchObservedRunningTime="2025-12-01 19:35:37.622446825 +0000 UTC m=+137.493476739" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.633417 4888 generic.go:334] "Generic (PLEG): container finished" podID="86a0fb84-d4de-443a-86fb-273acb4138d0" containerID="963ca39d16d3c7fa5c13357b39277d1011674e724225cb7c855383fc809d9eb9" exitCode=0 Dec 
01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.633476 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" event={"ID":"86a0fb84-d4de-443a-86fb-273acb4138d0","Type":"ContainerDied","Data":"963ca39d16d3c7fa5c13357b39277d1011674e724225cb7c855383fc809d9eb9"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.636038 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" event={"ID":"0105ed18-666e-4e81-aaf5-8e63ba162602","Type":"ContainerStarted","Data":"d38dee08d3fbb37427a506bd1266869fff8c2058e1dc2cf8b35a02ceebd6f51e"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.636075 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.637585 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" event={"ID":"50b1e183-9a9a-4daa-a769-78bc53d20c41","Type":"ContainerStarted","Data":"bc7d7860e6b6ba3432b5ef428f9c8488f45e87be52055fa8990af4af4864a002"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.645552 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" event={"ID":"a816c87b-de2f-4a92-a981-3808e168b282","Type":"ContainerStarted","Data":"0aa3fd29041b4cebb5d19bbfd823aad5e70445ab955445b6d3f0e76426e3cfab"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.650353 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.652648 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" podStartSLOduration=117.652631397 podStartE2EDuration="1m57.652631397s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.645082439 +0000 UTC m=+137.516112353" watchObservedRunningTime="2025-12-01 19:35:37.652631397 +0000 UTC m=+137.523661311" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.659580 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" event={"ID":"4f9ca160-603d-4864-a4e2-e52192731771","Type":"ContainerStarted","Data":"7f2e7b7ebe7d9c3051ac5f760a120a8f5d3a100d92ee7b2743168f42ae50971c"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.675755 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" event={"ID":"3ba0afc8-40bb-446f-be9d-4532fe287240","Type":"ContainerStarted","Data":"7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745"} Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.700851 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 
19:35:37.701617 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.201604306 +0000 UTC m=+138.072634220 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.702607 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-zjc7f" podStartSLOduration=117.702577835 podStartE2EDuration="1m57.702577835s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.675750825 +0000 UTC m=+137.546780739" watchObservedRunningTime="2025-12-01 19:35:37.702577835 +0000 UTC m=+137.573607839" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.748867 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-b9tj2" podStartSLOduration=5.748843203 podStartE2EDuration="5.748843203s" podCreationTimestamp="2025-12-01 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.700247085 +0000 UTC m=+137.571276999" watchObservedRunningTime="2025-12-01 19:35:37.748843203 +0000 UTC m=+137.619873107" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.751880 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-42k5p" podStartSLOduration=117.751858864 podStartE2EDuration="1m57.751858864s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.738358636 +0000 UTC m=+137.609388560" watchObservedRunningTime="2025-12-01 19:35:37.751858864 +0000 UTC m=+137.622888778" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.790019 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.802667 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.804047 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t425v"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.807880 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns/dns-default-7ggph"] Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.815467 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.315448935 +0000 UTC m=+138.186478849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.827658 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.827967 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hpngg"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.843667 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rmlz" podStartSLOduration=117.843637826 podStartE2EDuration="1m57.843637826s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:37.76630417 +0000 UTC m=+137.637334104" watchObservedRunningTime="2025-12-01 19:35:37.843637826 +0000 UTC m=+137.714667770" Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.860490 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6"] Dec 01 19:35:37 crc kubenswrapper[4888]: I1201 19:35:37.903443 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:37 crc kubenswrapper[4888]: E1201 19:35:37.903910 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.403889916 +0000 UTC m=+138.274919830 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.006724 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.007090 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.507075522 +0000 UTC m=+138.378105446 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.107454 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.107624 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.607594408 +0000 UTC m=+138.478624322 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.107746 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.108087 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.608064462 +0000 UTC m=+138.479094466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.213440 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.214357 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.714335952 +0000 UTC m=+138.585365866 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.314903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.315478 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.815466877 +0000 UTC m=+138.686496791 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.380934 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-wzxcq" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.386666 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 19:35:38 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld Dec 01 19:35:38 crc kubenswrapper[4888]: [+]process-running ok Dec 01 19:35:38 crc kubenswrapper[4888]: healthz check failed Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.386716 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.415797 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.416315 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:38.916297713 +0000 UTC m=+138.787327627 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.523161 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.524332 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.024310966 +0000 UTC m=+138.895340880 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.629912 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.630653 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.130629347 +0000 UTC m=+139.001659261 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.734996 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.735315 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.235305039 +0000 UTC m=+139.106334953 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.780051 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" event={"ID":"790b6a30-feee-4d83-9dca-ccc8116b0b8d","Type":"ContainerStarted","Data":"2c8e8c2e77319abd40c8c8d2f4deaa47b87fad819bb850da2acaa62b0f541140"} Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.842480 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.842903 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.342885099 +0000 UTC m=+139.213915023 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.864817 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" event={"ID":"5caa088d-82b7-45f9-b540-57fa54882521","Type":"ContainerStarted","Data":"938a3efb6af61838993d80d688f2d1eee0c27059edfc10f04161bf5f46cc24c8"} Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.904470 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" event={"ID":"0e9db052-f4a6-4bf5-9dd7-5ea6ee940deb","Type":"ContainerStarted","Data":"63ae467231a6e6cdb40f80f80f5e7fd66adfcd3200c906e2febef12e27118751"} Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.942990 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" event={"ID":"a8e31e84-31ed-4549-81b7-7409c1f34c08","Type":"ContainerStarted","Data":"ca5ecc87b416571b8abd181be6ba73781b86f0c7369560265bcb2b003242f130"} Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.944131 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.944811 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:38 crc kubenswrapper[4888]: E1201 19:35:38.945680 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.4455429 +0000 UTC m=+139.316572814 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.949443 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" event={"ID":"456a034d-a3c3-4cae-b0a1-4f5d0569ec08","Type":"ContainerStarted","Data":"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657"} Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.952955 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.955312 4888 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-5ksc9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused" start-of-body= Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.955360 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.30:6443/healthz\": dial tcp 10.217.0.30:6443: connect: connection refused" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.957380 4888 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-mck5b container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.957439 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" podUID="a8e31e84-31ed-4549-81b7-7409c1f34c08" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/healthz\": dial tcp 10.217.0.17:8443: connect: connection refused" Dec 01 19:35:38 crc kubenswrapper[4888]: I1201 19:35:38.980746 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" event={"ID":"5f780c25-3451-47ee-9c35-60d649683350","Type":"ContainerStarted","Data":"d9b3b4138a2dfc4550272407017184cb9012d7cde6b596152b8bd61a78714033"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.008641 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" event={"ID":"1a9245f3-0247-4dd6-b4c8-0658f524bc1c","Type":"ContainerStarted","Data":"a0347957bdb9c303d1e0885a366aaa5c7c621c860ecbb1b8eacd53b0fd2da241"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.031898 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b" podStartSLOduration=119.031878088 podStartE2EDuration="1m59.031878088s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.002858211 +0000 UTC m=+138.873888125" watchObservedRunningTime="2025-12-01 19:35:39.031878088 +0000 UTC m=+138.902907992" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.032290 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b" podStartSLOduration=119.03228399 podStartE2EDuration="1m59.03228399s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.032001512 +0000 UTC m=+138.903031426" watchObservedRunningTime="2025-12-01 19:35:39.03228399 +0000 UTC m=+138.903313904" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.033124 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" event={"ID":"4f9ca160-603d-4864-a4e2-e52192731771","Type":"ContainerStarted","Data":"65eb872d500f967d7b4aa7c87823d1772acb6454360f99e942b93979d0b1e540"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.064687 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.071587 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.571543226 +0000 UTC m=+139.442573140 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.077007 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" event={"ID":"6962c849-519b-40a6-a785-9428962b8e3c","Type":"ContainerStarted","Data":"ce622a348d61db28ac39064f04fcf273e3cc9c4be7767729098942ee852d59b6"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.085331 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" podStartSLOduration=119.085316122 podStartE2EDuration="1m59.085316122s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.082643891 +0000 UTC m=+138.953673805" watchObservedRunningTime="2025-12-01 19:35:39.085316122 +0000 UTC m=+138.956346036" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.167373 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.168923 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.668910097 +0000 UTC m=+139.539940011 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.171277 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" event={"ID":"0a040efc-7545-47b3-b66e-654b88099f0a","Type":"ContainerStarted","Data":"d5969baa0dd427057b033d11e8dc5abe5c999a59809a07d1511e1e461a5cc222"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.171314 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" event={"ID":"0a040efc-7545-47b3-b66e-654b88099f0a","Type":"ContainerStarted","Data":"c871cc43a365d2bec5e4b8df84afbccb119e623d38d802de88ce122ecf435286"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.205370 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" event={"ID":"60a42fba-5e64-4a68-a9a3-e29ff836d97f","Type":"ContainerStarted","Data":"a53510148c21cd6c8cf8bae354cab1fcd3478c34c38af188ff985fc2a922904e"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.235278 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" event={"ID":"18fe9152-c196-4b59-bb0c-b01057148a0f","Type":"ContainerStarted","Data":"a7daad99cd8b70d678e1894f41174432b10318f4e5e94eb6ac7cb6bd73de0387"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.235532 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" event={"ID":"18fe9152-c196-4b59-bb0c-b01057148a0f","Type":"ContainerStarted","Data":"0c1d64a6fc3476a5343e4fe2f585fbdbf9a8bbc95b5249c49383b4d3552b60df"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.236616 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.241173 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" podStartSLOduration=119.241155389 podStartE2EDuration="1m59.241155389s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.199609454 +0000 UTC m=+139.070639368" watchObservedRunningTime="2025-12-01 19:35:39.241155389 +0000 UTC m=+139.112185303" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.262082 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" event={"ID":"7773020b-602c-4890-a3f5-944b5ba4a9a2","Type":"ContainerStarted","Data":"f1d84795565f15ab90f980fd539652f4acf196851b1e9b5bde6823a2932eb8c7"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.262135 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" 
event={"ID":"7773020b-602c-4890-a3f5-944b5ba4a9a2","Type":"ContainerStarted","Data":"f4a1c387eb35b90a87349a02196d49d02c21461e21f3c6de1471f6c89855e0d0"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.270650 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.271908 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.771893068 +0000 UTC m=+139.642922982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.274878 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-g4ddt" podStartSLOduration=119.274856927 podStartE2EDuration="1m59.274856927s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.244228102 +0000 UTC m=+139.115258016" watchObservedRunningTime="2025-12-01 19:35:39.274856927 +0000 UTC m=+139.145886841" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.282534 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.287596 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" event={"ID":"86a0fb84-d4de-443a-86fb-273acb4138d0","Type":"ContainerStarted","Data":"6d5fcf1badb6dae2cb83d2a6c62ab82ba4e13abc6e612bb9615c9d50785c649b"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.288241 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.316392 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-2w272" podStartSLOduration=119.316377222 podStartE2EDuration="1m59.316377222s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.276816926 +0000 UTC m=+139.147846860" watchObservedRunningTime="2025-12-01 19:35:39.316377222 +0000 UTC m=+139.187407136" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.316698 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-m99gv" 
podStartSLOduration=119.316692681 podStartE2EDuration="1m59.316692681s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.31467224 +0000 UTC m=+139.185702154" watchObservedRunningTime="2025-12-01 19:35:39.316692681 +0000 UTC m=+139.187722595" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.318731 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gd6ps" event={"ID":"50de192e-a3d8-4fc9-94c6-ed727a3bffc1","Type":"ContainerStarted","Data":"4f57466c147622be1cff6a0d7adefbbd679a3e22f1ae2ffab38a4617116d0a58"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.319543 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-gd6ps" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.326922 4888 patch_prober.go:28] interesting pod/downloads-7954f5f757-gd6ps container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.326969 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gd6ps" podUID="50de192e-a3d8-4fc9-94c6-ed727a3bffc1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.336660 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" event={"ID":"3b57ff58-42f5-48ae-8637-aa879cf54dc5","Type":"ContainerStarted","Data":"bd8d6ca42b05469deff6a737fdcfd8dc7722ebb80105cb2540fa6f5f55745c26"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.360636 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" event={"ID":"500c01ff-5b97-4c18-9080-f714a39ba531","Type":"ContainerStarted","Data":"48089430f8ca5c53d248044f83723add60affec1b25877c69ac7e34781da9e98"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.360685 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" event={"ID":"500c01ff-5b97-4c18-9080-f714a39ba531","Type":"ContainerStarted","Data":"ed185fb3a4329ad9de889a9925bf237c304d28fd829a16fe0b0373d57a86b0b4"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.379443 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.379809 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" podStartSLOduration=119.379791937 podStartE2EDuration="1m59.379791937s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 
19:35:39.351208724 +0000 UTC m=+139.222238638" watchObservedRunningTime="2025-12-01 19:35:39.379791937 +0000 UTC m=+139.250821851" Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.380788 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.880776057 +0000 UTC m=+139.751805971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.389416 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 19:35:39 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld Dec 01 19:35:39 crc kubenswrapper[4888]: [+]process-running ok Dec 01 19:35:39 crc kubenswrapper[4888]: healthz check failed Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.389488 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.397452 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" event={"ID":"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d","Type":"ContainerStarted","Data":"5cb8f0262b4a7da429b71e849d4357974824e2989e12641dd0ff7d175d415c04"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.422897 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" event={"ID":"6cb31fa6-3d13-4ccb-98c6-2a56fd37e279","Type":"ContainerStarted","Data":"db9a5ddc50d15c6d84422cb91a3a41f474d27acb78bc9829047e702eb06fc93b"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.431598 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" event={"ID":"35cbe699-623b-44fa-8402-ea41fd3b9a8e","Type":"ContainerStarted","Data":"bb649fc25f20f59741a2ea375c61f13f78a12d5fc851af9bab9347ac7436be42"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.483467 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lmqwn" podStartSLOduration=119.483447838 podStartE2EDuration="1m59.483447838s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.473327122 +0000 UTC m=+139.344357036" watchObservedRunningTime="2025-12-01 19:35:39.483447838 +0000 UTC m=+139.354477752" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.483845 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
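The router startup probe output above ("[-]backend-http failed: reason withheld", "[+]process-running ok", "healthz check failed") is the usual aggregated-healthz rendering: one line per named sub-check and a trailing summary, served with status 500 whenever any sub-check fails, which is exactly the "statuscode: 500" the prober then reports. A small Go sketch that reproduces the output format; the check names are copied from the log, but the handler itself is an illustration built on plain net/http, not the router's actual code.

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// check is one named health check; ok=false renders as "[-]name failed".
type check struct {
	name string
	ok   bool
}

// healthzHandler renders the aggregate format seen in the probe output
// and answers 500 when any check fails, 200 otherwise.
func healthzHandler(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var b strings.Builder
		failed := false
		for _, c := range checks {
			if c.ok {
				fmt.Fprintf(&b, "[+]%s ok\n", c.name)
			} else {
				failed = true
				fmt.Fprintf(&b, "[-]%s failed: reason withheld\n", c.name)
			}
		}
		if failed {
			b.WriteString("healthz check failed\n")
			w.WriteHeader(http.StatusInternalServerError) // the probe sees statuscode: 500
		}
		fmt.Fprint(w, b.String())
	}
}

func main() {
	checks := []check{
		{name: "backend-http", ok: false},
		{name: "has-synced", ok: false},
		{name: "process-running", ok: true},
	}
	http.HandleFunc("/healthz", healthzHandler(checks))
	http.ListenAndServe("127.0.0.1:8080", nil)
}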
pod="openshift-service-ca/service-ca-9c57cc56f-hpngg" podStartSLOduration=119.48384086 podStartE2EDuration="1m59.48384086s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.441665496 +0000 UTC m=+139.312695410" watchObservedRunningTime="2025-12-01 19:35:39.48384086 +0000 UTC m=+139.354870774" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.484449 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.485528 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:39.98551363 +0000 UTC m=+139.856543544 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.525959 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hj2dp" podStartSLOduration=119.525944902 podStartE2EDuration="1m59.525944902s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.515495056 +0000 UTC m=+139.386524970" watchObservedRunningTime="2025-12-01 19:35:39.525944902 +0000 UTC m=+139.396974806" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.565347 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vdkm7" event={"ID":"66a22559-1ea1-4cf3-86d5-2672504a1968","Type":"ContainerStarted","Data":"887c7ea0e640b61164185180ae16255bf5c969ca77daf23d9f804529357ca434"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.565396 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vdkm7" event={"ID":"66a22559-1ea1-4cf3-86d5-2672504a1968","Type":"ContainerStarted","Data":"44e62a8b4347690746aac9533ff890e5476d592a6e14df24c7dc260bb4548c54"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.592802 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.594042 4888 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.094031138 +0000 UTC m=+139.965061052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.606652 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-d644g" event={"ID":"420bd4d7-068c-4b38-b065-0d93466de36f","Type":"ContainerStarted","Data":"77c53b7baaec229530b0d6b1f7b67c59f176de496a05258679fbe6796bed2934"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.630479 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-gd6ps" podStartSLOduration=119.630457159 podStartE2EDuration="1m59.630457159s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.623617312 +0000 UTC m=+139.494647256" watchObservedRunningTime="2025-12-01 19:35:39.630457159 +0000 UTC m=+139.501487073" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.630624 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" podStartSLOduration=119.630620673 podStartE2EDuration="1m59.630620673s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.575616932 +0000 UTC m=+139.446646846" watchObservedRunningTime="2025-12-01 19:35:39.630620673 +0000 UTC m=+139.501650587" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.634746 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" event={"ID":"f8d684ef-1323-46d4-aa9b-446ebdbd7d13","Type":"ContainerStarted","Data":"5c73bbc3ac1324f3af9da7f40b651678bcc73a6ee9cfc0a9089ca7035b341d9f"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.644360 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" event={"ID":"0105ed18-666e-4e81-aaf5-8e63ba162602","Type":"ContainerStarted","Data":"6ad3b5a2f6f2c3011eca97ac908996f1b0a36c3758d670bb2e180d4b4b4bfa5f"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.653010 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" event={"ID":"94df7fc9-9387-4d94-bc68-ba178504980e","Type":"ContainerStarted","Data":"5ae17f7c958e96c08ee9fbcf66a197d4ab9ac4b37229619f4b15763da50968cc"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.653054 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" 
event={"ID":"94df7fc9-9387-4d94-bc68-ba178504980e","Type":"ContainerStarted","Data":"fe5287060a42ce3a1fc5d17a7591698ecd947d430db915e38fc67d5d0faec2a2"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.669222 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" event={"ID":"3ba0afc8-40bb-446f-be9d-4532fe287240","Type":"ContainerStarted","Data":"b72a7e33253df98ce95b670c1b37184d43bc60dbb813b1343ec5d0b25fbb0257"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.679579 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" event={"ID":"0e2c960f-af3d-4547-b034-c77d3598b887","Type":"ContainerStarted","Data":"7d1adf846f58fd478f701c5c94cd1ed7f1c23fbfeeff45a7b87d0478c973dfc0"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.680327 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.681986 4888 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-7k5h6 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused" start-of-body= Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.682023 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" podUID="0e2c960f-af3d-4547-b034-c77d3598b887" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.41:5443/healthz\": dial tcp 10.217.0.41:5443: connect: connection refused" Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.691279 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" event={"ID":"fac68ebb-66d0-4ffa-b6a0-61f708fe70db","Type":"ContainerStarted","Data":"b6aeac5d0b808b179ff747a630b75a81346f3444077291218de5a5ef23e826d1"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.691329 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" event={"ID":"fac68ebb-66d0-4ffa-b6a0-61f708fe70db","Type":"ContainerStarted","Data":"680149811a94d61c8840570dfb42d5cde36681a18233d12b43c65baca9fb55b2"} Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.693292 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.693430 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.1933985 +0000 UTC m=+140.064428434 (durationBeforeRetry 500ms). 
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.693828 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.694256 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-7gg6f" podStartSLOduration=119.694239025 podStartE2EDuration="1m59.694239025s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.655467974 +0000 UTC m=+139.526497908" watchObservedRunningTime="2025-12-01 19:35:39.694239025 +0000 UTC m=+139.565268949"
Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.697816 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.197794393 +0000 UTC m=+140.068824407 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.724329 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7ggph" event={"ID":"eed00975-3972-4bc7-aac6-11986d1d5a74","Type":"ContainerStarted","Data":"c1d253ed5471c9780a0d2a58d632f2d783acc7c3754afefc21a433521f7ff78b"}
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.744714 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vdkm7" podStartSLOduration=7.744691209 podStartE2EDuration="7.744691209s" podCreationTimestamp="2025-12-01 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.713800446 +0000 UTC m=+139.584830370" watchObservedRunningTime="2025-12-01 19:35:39.744691209 +0000 UTC m=+139.615721123"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.776255 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" event={"ID":"f325c412-68ff-4735-a4fb-c5d2183d0401","Type":"ContainerStarted","Data":"0041546c1d2a6d109f67753706f4fda4ba674cec2828f6e8b25a6d1973c11ada"}
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.788998 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.789040 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.801129 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-b74fz" podStartSLOduration=119.801103553 podStartE2EDuration="1m59.801103553s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.751226627 +0000 UTC m=+139.622256571" watchObservedRunningTime="2025-12-01 19:35:39.801103553 +0000 UTC m=+139.672133467"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.809510 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.809896 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.309868018 +0000 UTC m=+140.180897942 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.810197 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.810277 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-8zlm9" podStartSLOduration=119.81026084 podStartE2EDuration="1m59.81026084s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.809570959 +0000 UTC m=+139.680600873" watchObservedRunningTime="2025-12-01 19:35:39.81026084 +0000 UTC m=+139.681290754"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.811052 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" podStartSLOduration=119.811046454 podStartE2EDuration="1m59.811046454s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.785975626 +0000 UTC m=+139.657005550" watchObservedRunningTime="2025-12-01 19:35:39.811046454 +0000 UTC m=+139.682076368"
Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.813531 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.313518768 +0000 UTC m=+140.184548682 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.860220 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" podStartSLOduration=119.860204298 podStartE2EDuration="1m59.860204298s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.859545698 +0000 UTC m=+139.730575612" watchObservedRunningTime="2025-12-01 19:35:39.860204298 +0000 UTC m=+139.731234202"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.894235 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" podStartSLOduration=119.894209805 podStartE2EDuration="1m59.894209805s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.887581365 +0000 UTC m=+139.758611289" watchObservedRunningTime="2025-12-01 19:35:39.894209805 +0000 UTC m=+139.765239719"
Dec 01 19:35:39 crc kubenswrapper[4888]: I1201 19:35:39.947747 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:39 crc kubenswrapper[4888]: E1201 19:35:39.952561 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.452540527 +0000 UTC m=+140.323570441 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
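The "Observed pod startup duration" records above are arithmetic over the timestamps they print: with both pull timestamps at the zero time (0001-01-01, no image pull was observed), podStartSLOduration appears to be watchObservedRunningTime minus podCreationTimestamp, and podStartE2EDuration is the same interval formatted as a Go duration string ("1m59.085316122s"). A worked check in Go, using values copied from the machine-config-controller record earlier in this log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the timestamps as printed in the log records.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-12-01 19:33:40 +0000 UTC")
	running, _ := time.Parse(layout, "2025-12-01 19:35:39.085316122 +0000 UTC")

	slo := running.Sub(created)
	fmt.Printf("podStartSLOduration=%.9f\n", slo.Seconds()) // 119.085316122
	fmt.Printf("podStartE2EDuration=%q\n", slo)             // "1m59.085316122s"
}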
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.012805 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rl64v" podStartSLOduration=120.012787727 podStartE2EDuration="2m0.012787727s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:39.946407522 +0000 UTC m=+139.817437446" watchObservedRunningTime="2025-12-01 19:35:40.012787727 +0000 UTC m=+139.883817651"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.014373 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" podStartSLOduration=121.014363085 podStartE2EDuration="2m1.014363085s" podCreationTimestamp="2025-12-01 19:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:40.012007324 +0000 UTC m=+139.883037258" watchObservedRunningTime="2025-12-01 19:35:40.014363085 +0000 UTC m=+139.885392999"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.049908 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.050311 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.55029796 +0000 UTC m=+140.421327874 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.052953 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.150631 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.151056 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.651035323 +0000 UTC m=+140.522065237 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.252353 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.252657 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.752642952 +0000 UTC m=+140.623672866 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
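The alternating reconciler_common.go:159/:218 lines above are one reconcile loop comparing desired state against actual state: the same PVC must be torn down for the deleted pod 8f668bae-612b-4b75-9490-919e737c6a3b and mounted for the new image-registry pod, and every pass re-attempts both until the CSI driver registers. A toy Go sketch of the pattern; reconcile, volumeName, and the maps are illustrative stand-ins (the kubelet actually keys by volume and pod, which is how one volume can be on both sides at once):

package main

import (
	"fmt"
	"time"
)

type volumeName string

// reconcile compares the two state maps: volumes wanted but not mounted get
// a mount attempt, volumes mounted but no longer wanted get an unmount
// attempt. Failed attempts leave the state unchanged, so they repeat.
func reconcile(desired, actual map[volumeName]bool, mount, unmount func(volumeName) error) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
			if err := mount(v); err == nil {
				actual[v] = true
			}
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
			if err := unmount(v); err == nil {
				delete(actual, v)
			}
		}
	}
}

func main() {
	pvc := volumeName("pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8")
	desired := map[volumeName]bool{pvc: true}           // wanted by the new pod
	actual := map[volumeName]bool{"stale-volume": true} // left over from a deleted pod

	// Both operations fail the way the log shows, so each pass repeats them.
	failing := func(v volumeName) error {
		return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
	}
	for i := 0; i < 2; i++ {
		reconcile(desired, actual, failing, failing)
		time.Sleep(100 * time.Millisecond)
	}
}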
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.353107 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.353310 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.853280942 +0000 UTC m=+140.724310856 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.353438 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.353901 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.853888651 +0000 UTC m=+140.724918565 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.383921 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 19:35:40 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld
Dec 01 19:35:40 crc kubenswrapper[4888]: [+]process-running ok
Dec 01 19:35:40 crc kubenswrapper[4888]: healthz check failed
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.384254 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.454820 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.455324 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:40.955308374 +0000 UTC m=+140.826338288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.556026 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.556511 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.056497631 +0000 UTC m=+140.927527545 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.657052 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.657412 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.157398479 +0000 UTC m=+141.028428393 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.665965 4888 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.759250 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.759657 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.259642817 +0000 UTC m=+141.130672731 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
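The plugin_watcher.go:194 line above is the pivotal event in this stretch of log: the hostpath provisioner's registration socket finally appears under /var/lib/kubelet/plugins_registry, which is what eventually lets the repeated "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" failures resolve; the csi-hostpathplugin containers start moments later. A sketch of the lookup-versus-registration race with illustrative types; deriving the driver name from the socket filename is a stand-in for the real registration handshake, which is a gRPC exchange over that socket:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
	"sync"
)

// csiDriverRegistry maps driver names to endpoints; Mount/Unmount paths
// look a driver up here and fail until registration has happened.
type csiDriverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string
}

func (r *csiDriverRegistry) client(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	ep, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return ep, nil
}

// observeSocket stands in for the plugin watcher callback; the "-reg.sock"
// filename convention is borrowed from the path in the log line above.
func (r *csiDriverRegistry) observeSocket(path string) {
	name := strings.TrimSuffix(filepath.Base(path), "-reg.sock")
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = path
}

func main() {
	reg := &csiDriverRegistry{drivers: map[string]string{}}
	const driver = "kubevirt.io.hostpath-provisioner"

	if _, err := reg.client(driver); err != nil {
		fmt.Println("before registration:", err)
	}
	reg.observeSocket("/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock")
	if ep, err := reg.client(driver); err == nil {
		fmt.Println("after registration:", ep)
	}
}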
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.809106 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-tqvr6" event={"ID":"5f780c25-3451-47ee-9c35-60d649683350","Type":"ContainerStarted","Data":"2fabbd72459f3133dd9b3dc7dac800d34a4c1bec3227ac0dca5371b4256634bc"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.816704 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mfmd" event={"ID":"94df7fc9-9387-4d94-bc68-ba178504980e","Type":"ContainerStarted","Data":"31843ee474a96ecf38ac333932f6fb32c131f509802c010cdc17c26a401c9fcf"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.826845 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" event={"ID":"0a040efc-7545-47b3-b66e-654b88099f0a","Type":"ContainerStarted","Data":"f86b0f42d1cbbdafdecc117d846b925fd91ccd55760c89f6372bc0af465b0bed"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.828396 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5sqgg" event={"ID":"e4e15cf9-67c7-45c3-8d2f-38f4a77ed72d","Type":"ContainerStarted","Data":"1635a5e46b8b937d35079978938041d52cc2c9ea967ab555e30d5b72052051e0"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.834078 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" event={"ID":"5caa088d-82b7-45f9-b540-57fa54882521","Type":"ContainerStarted","Data":"caf1e3e46288b2fbe9442a4e3eb4e97d07d7752d06788861d006e5ed83a18777"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.839357 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7ggph" event={"ID":"eed00975-3972-4bc7-aac6-11986d1d5a74","Type":"ContainerStarted","Data":"3e001f04de71977f1c10211420ad86a8c60f1182100f8bb4ba2dff34bfa03825"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.839402 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-7ggph" event={"ID":"eed00975-3972-4bc7-aac6-11986d1d5a74","Type":"ContainerStarted","Data":"ccbd7515aff7732004fcd692649dd59ada16f83ba0e66a0e6af1279d2b50b966"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.839422 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-7ggph"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.841065 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" event={"ID":"1a9245f3-0247-4dd6-b4c8-0658f524bc1c","Type":"ContainerStarted","Data":"70d02ffd313add1ebce42c8ad31e9c7cb315e752b2a8c867b4ae09a3af7e4bb3"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.841862 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-t425v"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.845479 4888 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-t425v container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused" start-of-body=
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.845535 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.27:8080/healthz\": dial tcp 10.217.0.27:8080: connect: connection refused"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.853993 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" event={"ID":"4f9ca160-603d-4864-a4e2-e52192731771","Type":"ContainerStarted","Data":"2bddaeb72c8f68856105a3870f886b6643793a7a8f094a2e72e2c87ccc33e824"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.854043 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" event={"ID":"4f9ca160-603d-4864-a4e2-e52192731771","Type":"ContainerStarted","Data":"218b1981263e881283fc7f3cefb833c77f7c18e450ab41e6c1861049e23a33a4"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.860642 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.861061 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.36104066 +0000 UTC m=+141.232070584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.861094 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.861444 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.361426212 +0000 UTC m=+141.232456126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.881608 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" event={"ID":"f325c412-68ff-4735-a4fb-c5d2183d0401","Type":"ContainerStarted","Data":"7ef9a00a3dc83365ecc9d414c969eb6b09f998fc2add7e49b08c7a33d3a5a6b9"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.899629 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" event={"ID":"0e2c960f-af3d-4547-b034-c77d3598b887","Type":"ContainerStarted","Data":"2088d07e9be4306c4280929038e07393c6b1e3f9d01cbf521697aa887c342496"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.908155 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-665fr"]
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.909352 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-665fr"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.912026 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.915956 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-tj6dq" podStartSLOduration=120.915940088 podStartE2EDuration="2m0.915940088s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:40.91167157 +0000 UTC m=+140.782701484" watchObservedRunningTime="2025-12-01 19:35:40.915940088 +0000 UTC m=+140.786970022"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.926534 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" event={"ID":"790b6a30-feee-4d83-9dca-ccc8116b0b8d","Type":"ContainerStarted","Data":"6286860ece054a4a4b14b77ae3653b6c4964e92adc3f89561100ee15e35549ac"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.926940 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.942894 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" event={"ID":"35cbe699-623b-44fa-8402-ea41fd3b9a8e","Type":"ContainerStarted","Data":"494cd57423611ec658dc9e831f442f536984ddce98b03e68d5916c9333fb87d4"}
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.942933 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" event={"ID":"35cbe699-623b-44fa-8402-ea41fd3b9a8e","Type":"ContainerStarted","Data":"5ad9c6ea0e1853f003b21b0fd48624bbf79c4e923962a46b1b634bfa1c075f3a"}
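Every volume record above carries the same UniqueName shape, kubernetes.io/csi/<driver>^<volumeHandle>; the driver segment is what the mount and unmount paths look up, and the failure repeats until that lookup succeeds. A small Go helper, illustrative only, that splits a unique name as printed in these lines:

package main

import (
	"fmt"
	"strings"
)

// splitUniqueName pulls the driver name and volume handle out of the
// "kubernetes.io/csi/<driver>^<volumeHandle>" format; the '^' separator
// is taken from the log lines themselves.
func splitUniqueName(unique string) (driver, handle string, err error) {
	const prefix = "kubernetes.io/csi/"
	if !strings.HasPrefix(unique, prefix) {
		return "", "", fmt.Errorf("not a CSI unique volume name: %q", unique)
	}
	rest := strings.TrimPrefix(unique, prefix)
	driver, handle, ok := strings.Cut(rest, "^")
	if !ok {
		return "", "", fmt.Errorf("missing '^' separator: %q", unique)
	}
	return driver, handle, nil
}

func main() {
	driver, handle, err := splitUniqueName("kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8")
	fmt.Println(driver, handle, err)
}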
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.943951 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-665fr"]
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.946400 4888 patch_prober.go:28] interesting pod/downloads-7954f5f757-gd6ps container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.946436 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gd6ps" podUID="50de192e-a3d8-4fc9-94c6-ed727a3bffc1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.955101 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.960553 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-mck5b"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.962689 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 19:35:40 crc kubenswrapper[4888]: E1201 19:35:40.963799 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.463785004 +0000 UTC m=+141.334814918 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.969399 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-skz8b"
Dec 01 19:35:40 crc kubenswrapper[4888]: I1201 19:35:40.996388 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-r9km8" podStartSLOduration=120.996369208 podStartE2EDuration="2m0.996369208s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:40.99015616 +0000 UTC m=+140.861186074" watchObservedRunningTime="2025-12-01 19:35:40.996369208 +0000 UTC m=+140.867399122"
Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.049321 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-7ggph" podStartSLOduration=9.049304017 podStartE2EDuration="9.049304017s" podCreationTimestamp="2025-12-01 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:41.047646827 +0000 UTC m=+140.918676751" watchObservedRunningTime="2025-12-01 19:35:41.049304017 +0000 UTC m=+140.920333931"
Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.065922 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.066008 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr"
Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.066177 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksz8f\" (UniqueName: \"kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr"
Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.066781 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr"
Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.085454
4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.585433328 +0000 UTC m=+141.456463242 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.088410 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hqn42"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.124415 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.129114 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqn42"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.129136 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" podStartSLOduration=121.129119958 podStartE2EDuration="2m1.129119958s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:41.128240161 +0000 UTC m=+140.999270075" watchObservedRunningTime="2025-12-01 19:35:41.129119958 +0000 UTC m=+141.000149872" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.136255 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.167871 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168065 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksz8f\" (UniqueName: \"kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168143 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27tpw\" (UniqueName: \"kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168164 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities\") 
pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168199 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168248 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.168265 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.168446 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.668428765 +0000 UTC m=+141.539458679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.169379 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.172148 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.202321 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-7k5h6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.208708 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksz8f\" (UniqueName: \"kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f\") pod \"certified-operators-665fr\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " 
pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.225528 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-mdtrk" podStartSLOduration=121.225511469 podStartE2EDuration="2m1.225511469s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:41.223985703 +0000 UTC m=+141.095015607" watchObservedRunningTime="2025-12-01 19:35:41.225511469 +0000 UTC m=+141.096541383" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.234454 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.265509 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.266841 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.276389 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27tpw\" (UniqueName: \"kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.276450 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.276511 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.276530 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.276967 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.277471 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities\") pod \"community-operators-hqn42\" (UID: 
\"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.277742 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.777731737 +0000 UTC m=+141.648761651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.279156 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz" podStartSLOduration=121.27914841 podStartE2EDuration="2m1.27914841s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:41.278293734 +0000 UTC m=+141.149323638" watchObservedRunningTime="2025-12-01 19:35:41.27914841 +0000 UTC m=+141.150178324" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.287238 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.330015 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27tpw\" (UniqueName: \"kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw\") pod \"community-operators-hqn42\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.377076 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.377486 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9fkq\" (UniqueName: \"kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.377540 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.377590 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.378585 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.878562773 +0000 UTC m=+141.749592687 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.388432 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 19:35:41 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld Dec 01 19:35:41 crc kubenswrapper[4888]: [+]process-running ok Dec 01 19:35:41 crc kubenswrapper[4888]: healthz check failed Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.388837 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.453458 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.485211 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9fkq\" (UniqueName: \"kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.485534 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.485576 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.486198 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.486977 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.487159 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.487479 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:41.987465732 +0000 UTC m=+141.858495646 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.488498 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.507641 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.508394 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.541154 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9fkq\" (UniqueName: \"kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq\") pod \"certified-operators-vt9c7\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.587908 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.591910 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.592125 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 19:35:42.092091781 +0000 UTC m=+141.963121695 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.592325 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.592619 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.592745 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.592930 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nd5v\" (UniqueName: \"kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: E1201 19:35:41.593524 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 19:35:42.093509414 +0000 UTC m=+141.964539328 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-98xb2" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.602410 4888 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-01T19:35:40.665990268Z","Handler":null,"Name":""} Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.619919 4888 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.619953 4888 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.666276 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.697820 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.698522 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.698671 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.698787 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nd5v\" (UniqueName: \"kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.698883 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.700404 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.733727 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.737204 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nd5v\" (UniqueName: \"kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v\") pod \"community-operators-nzbl6\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.800018 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.803873 4888 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.803917 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.834987 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.874091 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-98xb2\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") " pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.885676 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hqn42"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.955516 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-665fr"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.984787 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerStarted","Data":"5038007ee77de8ab45777ca047f019fbac8cca5799f9b032ff3b33a672d51c51"} Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.992389 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" event={"ID":"4f9ca160-603d-4864-a4e2-e52192731771","Type":"ContainerStarted","Data":"256a24f31e34187374ee37daf27dabe6d2ccd7608e5a7887a6b7baa6c51e81d4"} Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.994146 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:35:41 crc kubenswrapper[4888]: I1201 19:35:41.999643 4888 generic.go:334] "Generic (PLEG): container finished" podID="3ba0afc8-40bb-446f-be9d-4532fe287240" containerID="b72a7e33253df98ce95b670c1b37184d43bc60dbb813b1343ec5d0b25fbb0257" exitCode=0 Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.000367 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" event={"ID":"3ba0afc8-40bb-446f-be9d-4532fe287240","Type":"ContainerDied","Data":"b72a7e33253df98ce95b670c1b37184d43bc60dbb813b1343ec5d0b25fbb0257"} Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.001602 4888 patch_prober.go:28] interesting pod/downloads-7954f5f757-gd6ps container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.001648 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gd6ps" podUID="50de192e-a3d8-4fc9-94c6-ed727a3bffc1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.014239 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9zw95" podStartSLOduration=10.014220852 podStartE2EDuration="10.014220852s" podCreationTimestamp="2025-12-01 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:42.010998675 +0000 UTC m=+141.882028589" watchObservedRunningTime="2025-12-01 19:35:42.014220852 +0000 UTC 
m=+141.885250766" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.029049 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.029988 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p8s4h" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.139546 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.150035 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:35:42 crc kubenswrapper[4888]: W1201 19:35:42.167444 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78e273f1_d652_46e4_afe2_f9691d8d48e2.slice/crio-ac629657e9c13ab59dc6f33d154282784ef6b185acce102d67fb51d230780a74 WatchSource:0}: Error finding container ac629657e9c13ab59dc6f33d154282784ef6b185acce102d67fb51d230780a74: Status 404 returned error can't find the container with id ac629657e9c13ab59dc6f33d154282784ef6b185acce102d67fb51d230780a74 Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.380242 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 19:35:42 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld Dec 01 19:35:42 crc kubenswrapper[4888]: [+]process-running ok Dec 01 19:35:42 crc kubenswrapper[4888]: healthz check failed Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.380309 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.458495 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.546413 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"] Dec 01 19:35:42 crc kubenswrapper[4888]: W1201 19:35:42.551977 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod284ef7ed_219e_48bd_8b60_9d16ae856c9e.slice/crio-5fcb0b1894a3e8d250a23fefa74f4181fa650b9e77996503d5ec844cbf02749c WatchSource:0}: Error finding container 5fcb0b1894a3e8d250a23fefa74f4181fa650b9e77996503d5ec844cbf02749c: Status 404 returned error can't find the container with id 5fcb0b1894a3e8d250a23fefa74f4181fa650b9e77996503d5ec844cbf02749c Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.748666 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.749602 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.751601 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.751628 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.760490 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.815637 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.816029 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.842103 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf8p"] Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.845894 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.849133 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.860430 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf8p"] Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.916907 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh8vp\" (UniqueName: \"kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.916994 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.917016 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.917044 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.917070 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.917127 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:42 crc kubenswrapper[4888]: I1201 19:35:42.953980 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.006881 4888 generic.go:334] "Generic (PLEG): container finished" podID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerID="d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b" exitCode=0 Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.006954 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerDied","Data":"d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.006983 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerStarted","Data":"d62fa5e8071e7d6018096931f36d14dbe3004677c3fefe187b206deefd7edb87"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.008714 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.008963 4888 generic.go:334] "Generic (PLEG): container finished" podID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerID="05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1" exitCode=0 Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.009028 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerDied","Data":"05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.010686 4888 generic.go:334] "Generic (PLEG): container finished" podID="f9a74618-f827-452a-988f-c7f314143925" containerID="c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294" exitCode=0 Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.010736 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerDied","Data":"c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.010759 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerStarted","Data":"8fbf10f79519da7d280683e5f8cbab5e53ba9fb7515ad4b753d84bc983558551"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.012586 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" event={"ID":"284ef7ed-219e-48bd-8b60-9d16ae856c9e","Type":"ContainerStarted","Data":"8bd6c7a7a72d154d5b39f0af535ba41689615ee51ac615efbc4319561774bd85"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.012632 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" event={"ID":"284ef7ed-219e-48bd-8b60-9d16ae856c9e","Type":"ContainerStarted","Data":"5fcb0b1894a3e8d250a23fefa74f4181fa650b9e77996503d5ec844cbf02749c"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.012694 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.014219 4888 generic.go:334] "Generic (PLEG): container finished" podID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerID="c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31" exitCode=0 Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.014245 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerDied","Data":"c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.014285 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerStarted","Data":"ac629657e9c13ab59dc6f33d154282784ef6b185acce102d67fb51d230780a74"} Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.018068 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.018110 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.018203 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh8vp\" (UniqueName: \"kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.018513 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.018806 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.049117 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh8vp\" (UniqueName: \"kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp\") pod \"redhat-marketplace-ppf8p\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.066578 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.122542 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" podStartSLOduration=123.12251828 podStartE2EDuration="2m3.12251828s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:43.120071376 +0000 UTC m=+142.991101300" watchObservedRunningTime="2025-12-01 19:35:43.12251828 +0000 UTC m=+142.993548194" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.162850 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.252146 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"] Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.254447 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.287855 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"] Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.293813 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.324203 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.324295 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.324355 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-955vh\" (UniqueName: \"kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.370014 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.390330 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 19:35:43 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld
Dec 01 19:35:43 crc kubenswrapper[4888]: [+]process-running ok
Dec 01 19:35:43 crc kubenswrapper[4888]: healthz check failed
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.390379 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.429602 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") pod \"3ba0afc8-40bb-446f-be9d-4532fe287240\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") "
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430071 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-266nn\" (UniqueName: \"kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn\") pod \"3ba0afc8-40bb-446f-be9d-4532fe287240\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") "
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430194 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume\") pod \"3ba0afc8-40bb-446f-be9d-4532fe287240\" (UID: \"3ba0afc8-40bb-446f-be9d-4532fe287240\") "
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430424 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430459 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume" (OuterVolumeSpecName: "config-volume") pod "3ba0afc8-40bb-446f-be9d-4532fe287240" (UID: "3ba0afc8-40bb-446f-be9d-4532fe287240"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430471 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430534 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-955vh\" (UniqueName: \"kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430585 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ba0afc8-40bb-446f-be9d-4532fe287240-config-volume\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.430811 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.431052 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.438599 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn" (OuterVolumeSpecName: "kube-api-access-266nn") pod "3ba0afc8-40bb-446f-be9d-4532fe287240" (UID: "3ba0afc8-40bb-446f-be9d-4532fe287240"). InnerVolumeSpecName "kube-api-access-266nn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.439516 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3ba0afc8-40bb-446f-be9d-4532fe287240" (UID: "3ba0afc8-40bb-446f-be9d-4532fe287240"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.460129 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-955vh\" (UniqueName: \"kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh\") pod \"redhat-marketplace-jgt74\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.532959 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-266nn\" (UniqueName: \"kubernetes.io/projected/3ba0afc8-40bb-446f-be9d-4532fe287240-kube-api-access-266nn\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.532996 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ba0afc8-40bb-446f-be9d-4532fe287240-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.566616 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf8p"]
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.631942 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgt74"
Dec 01 19:35:43 crc kubenswrapper[4888]: I1201 19:35:43.894443 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"]
Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.020873 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerStarted","Data":"f9e2846c7292e5c7eb20a2c2d59a2279231670f06f567ad9b82ad61e1502dd22"}
Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.022584 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" event={"ID":"3ba0afc8-40bb-446f-be9d-4532fe287240","Type":"ContainerDied","Data":"7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745"}
Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.022606 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b15909777e925dd62780080df864908ac5ab7206eebba4798aeacdac819b745"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.023678 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"da3a101a-9114-4785-974b-633d9725c610","Type":"ContainerStarted","Data":"005add107f9547a64cadd7937e549c7c756fefee8edd36bd1ba169341c866ee2"} Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.025605 4888 generic.go:334] "Generic (PLEG): container finished" podID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerID="7a3499e4296f1d02ce3a6a3911943e6de82eec50190f1a6d76976ca21d36d903" exitCode=0 Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.025697 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerDied","Data":"7a3499e4296f1d02ce3a6a3911943e6de82eec50190f1a6d76976ca21d36d903"} Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.025720 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerStarted","Data":"1d08840f9510c9072635ada54bdcf182f120078e22efe999159171436ba2fd85"} Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.041680 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pspwm"] Dec 01 19:35:44 crc kubenswrapper[4888]: E1201 19:35:44.041923 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba0afc8-40bb-446f-be9d-4532fe287240" containerName="collect-profiles" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.041947 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba0afc8-40bb-446f-be9d-4532fe287240" containerName="collect-profiles" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.042059 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ba0afc8-40bb-446f-be9d-4532fe287240" containerName="collect-profiles" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.042883 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.045825 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.060014 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pspwm"] Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.141753 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5fd5\" (UniqueName: \"kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.141837 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.141878 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.242863 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5fd5\" (UniqueName: \"kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.242916 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.242949 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.243419 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.243470 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " 
pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.273464 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5fd5\" (UniqueName: \"kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5\") pod \"redhat-operators-pspwm\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.392495 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.394500 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 19:35:44 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld Dec 01 19:35:44 crc kubenswrapper[4888]: [+]process-running ok Dec 01 19:35:44 crc kubenswrapper[4888]: healthz check failed Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.394553 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.441087 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.442428 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.468348 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.501884 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.501942 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.506475 4888 patch_prober.go:28] interesting pod/console-f9d7485db-bt5fw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.506546 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-bt5fw" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerName="console" probeResult="failure" output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.547194 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.547361 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.547394 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrwhl\" (UniqueName: \"kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.626771 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.626818 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.641960 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.648219 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.648286 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrwhl\" (UniqueName: \"kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.648338 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.648778 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.649290 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content\") pod \"redhat-operators-595jt\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.673918 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrwhl\" (UniqueName: \"kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl\") pod \"redhat-operators-595jt\" (UID: 
\"c7792e13-fb99-4efb-aa5e-d80c70835269\") " pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.721479 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pspwm"] Dec 01 19:35:44 crc kubenswrapper[4888]: I1201 19:35:44.767282 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:35:44 crc kubenswrapper[4888]: W1201 19:35:44.773051 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25e7abac_5f63_4c75_91d9_e801ccf75389.slice/crio-b7e6e7451d649325bd175cb69d4d3d2367d4cf55eda61421da16ba886ddd656d WatchSource:0}: Error finding container b7e6e7451d649325bd175cb69d4d3d2367d4cf55eda61421da16ba886ddd656d: Status 404 returned error can't find the container with id b7e6e7451d649325bd175cb69d4d3d2367d4cf55eda61421da16ba886ddd656d Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.032458 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.053611 4888 generic.go:334] "Generic (PLEG): container finished" podID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerID="f5a04cf9eb02d25b9f82191ae66a579852a3ae2035cf46fdf838b062c6a94ec3" exitCode=0 Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.053710 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerDied","Data":"f5a04cf9eb02d25b9f82191ae66a579852a3ae2035cf46fdf838b062c6a94ec3"} Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.056226 4888 generic.go:334] "Generic (PLEG): container finished" podID="da3a101a-9114-4785-974b-633d9725c610" containerID="82df2e95a284f4292491e02e91eb58623fcce2d7705ce3ba515380d503d55b8d" exitCode=0 Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.056298 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"da3a101a-9114-4785-974b-633d9725c610","Type":"ContainerDied","Data":"82df2e95a284f4292491e02e91eb58623fcce2d7705ce3ba515380d503d55b8d"} Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.059544 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerStarted","Data":"740f66fe651111ae05f94f588c2b7e4ffd64444b8e256db3dd08a6a8047561a8"} Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.059602 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerStarted","Data":"b7e6e7451d649325bd175cb69d4d3d2367d4cf55eda61421da16ba886ddd656d"} Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.064336 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-l4k97" Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.327761 4888 patch_prober.go:28] interesting pod/downloads-7954f5f757-gd6ps container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.327815 4888 
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.327815 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gd6ps" podUID="50de192e-a3d8-4fc9-94c6-ed727a3bffc1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.328129 4888 patch_prober.go:28] interesting pod/downloads-7954f5f757-gd6ps container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body=
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.328173 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gd6ps" podUID="50de192e-a3d8-4fc9-94c6-ed727a3bffc1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused"
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.381123 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.396295 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 19:35:45 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld
Dec 01 19:35:45 crc kubenswrapper[4888]: [+]process-running ok
Dec 01 19:35:45 crc kubenswrapper[4888]: healthz check failed
Dec 01 19:35:45 crc kubenswrapper[4888]: I1201 19:35:45.396376 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.067584 4888 generic.go:334] "Generic (PLEG): container finished" podID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerID="740f66fe651111ae05f94f588c2b7e4ffd64444b8e256db3dd08a6a8047561a8" exitCode=0
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.067663 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerDied","Data":"740f66fe651111ae05f94f588c2b7e4ffd64444b8e256db3dd08a6a8047561a8"}
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.072034 4888 generic.go:334] "Generic (PLEG): container finished" podID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerID="1a6856137e8a64f5e85b3f8a18224a1a9cdd82917948de43c6b2192b1024cdca" exitCode=0
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.072666 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerDied","Data":"1a6856137e8a64f5e85b3f8a18224a1a9cdd82917948de43c6b2192b1024cdca"}
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.072702 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerStarted","Data":"599cb96ec33d50ce74946313f5ffac1aece058a98e7839b9c13bfc20165215a1"}
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.364826 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.383450 4888 patch_prober.go:28] interesting pod/router-default-5444994796-wzxcq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 19:35:46 crc kubenswrapper[4888]: [-]has-synced failed: reason withheld
Dec 01 19:35:46 crc kubenswrapper[4888]: [+]process-running ok
Dec 01 19:35:46 crc kubenswrapper[4888]: healthz check failed
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.383540 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wzxcq" podUID="b2d3b1bf-96e4-4a85-9ddb-730b02687767" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.404575 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.404650 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.405564 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.413711 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.507016 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir\") pod \"da3a101a-9114-4785-974b-633d9725c610\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") "
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.507082 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access\") pod \"da3a101a-9114-4785-974b-633d9725c610\" (UID: \"da3a101a-9114-4785-974b-633d9725c610\") "
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.507359 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.507771 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "da3a101a-9114-4785-974b-633d9725c610" (UID: "da3a101a-9114-4785-974b-633d9725c610"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.507929 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.508840 4888 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/da3a101a-9114-4785-974b-633d9725c610-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.512040 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "da3a101a-9114-4785-974b-633d9725c610" (UID: "da3a101a-9114-4785-974b-633d9725c610"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.512707 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.514222 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.514550 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.530351 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.537033 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 19:35:46 crc kubenswrapper[4888]: I1201 19:35:46.610394 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/da3a101a-9114-4785-974b-633d9725c610-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:47 crc kubenswrapper[4888]: W1201 19:35:47.078079 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-385d9acc91170b2fcce3e6e9ad0f507a29327062c764fe43e43016c07e3f873b WatchSource:0}: Error finding container 385d9acc91170b2fcce3e6e9ad0f507a29327062c764fe43e43016c07e3f873b: Status 404 returned error can't find the container with id 385d9acc91170b2fcce3e6e9ad0f507a29327062c764fe43e43016c07e3f873b
Dec 01 19:35:47 crc kubenswrapper[4888]: W1201 19:35:47.087833 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-5a759d0d17ababab71406059a9c0feeed3cbf42754663bdc82c5d9d2172fb219 WatchSource:0}: Error finding container 5a759d0d17ababab71406059a9c0feeed3cbf42754663bdc82c5d9d2172fb219: Status 404 returned error can't find the container with id 5a759d0d17ababab71406059a9c0feeed3cbf42754663bdc82c5d9d2172fb219
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.134435 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5a759d0d17ababab71406059a9c0feeed3cbf42754663bdc82c5d9d2172fb219"}
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.138686 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"385d9acc91170b2fcce3e6e9ad0f507a29327062c764fe43e43016c07e3f873b"}
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.139969 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3cfa632c4ec482eadd1ca93b19e95086f4731d94b4c6c76030758cabb9b51bba"}
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.143128 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"da3a101a-9114-4785-974b-633d9725c610","Type":"ContainerDied","Data":"005add107f9547a64cadd7937e549c7c756fefee8edd36bd1ba169341c866ee2"}
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.143160 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="005add107f9547a64cadd7937e549c7c756fefee8edd36bd1ba169341c866ee2"
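[editor's note] The "SyncLoop (PLEG): event for pod" entries above all share one shape: a pod UID in ID, an event Type such as ContainerStarted or ContainerDied, and a container or sandbox ID in Data. The Go sketch below is a rough model of that logged structure only; the field and type names mirror the log output, not the kubelet's internal pleg package, and the handling logic is a simplified assumption.

// plegevent.go - rough model of the PLEG events logged above. Field names
// mirror the log output; the kubelet's own types and handling differ.
package main

import "fmt"

type PodLifecycleEvent struct {
	ID   string // pod UID
	Type string // e.g. "ContainerStarted", "ContainerDied"
	Data string // container (or sandbox) ID the event refers to
}

// handle sketches the assumed consumer behavior: either event kind
// triggers a sync of the affected pod; a died container additionally
// has its exit code inspected (the "Generic (PLEG): container finished
// ... exitCode=0" lines above).
func handle(ev PodLifecycleEvent) {
	switch ev.Type {
	case "ContainerStarted":
		fmt.Printf("pod %s: container %s started -> sync pod\n", ev.ID, ev.Data)
	case "ContainerDied":
		fmt.Printf("pod %s: container %s died -> sync pod, check exit code\n", ev.ID, ev.Data)
	default:
		fmt.Printf("pod %s: unhandled event %s\n", ev.ID, ev.Type)
	}
}

func main() {
	// Values taken from the network-check-target-xd92c entry in this log.
	handle(PodLifecycleEvent{
		ID:   "3b6479f0-333b-4a96-9adf-2099afdc2447",
		Type: "ContainerStarted",
		Data: "5a759d0d17ababab71406059a9c0feeed3cbf42754663bdc82c5d9d2172fb219",
	})
}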
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.143209 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.381683 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:47 crc kubenswrapper[4888]: I1201 19:35:47.388489 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-wzxcq"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.168321 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6d6b2f3106080a28af8982043176b5f3b32fe687527aecc325c2aae6f68c94b6"}
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.168509 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.172813 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"dcc703db73d1e60d6e8ba67ec18e4e857cac3ba2d3ef924816cdfd5a2923b4b2"}
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.189006 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b6c064f46d37f6c5510de8a8e10019bd34f57b89ed5b2dc485804b94db27cd53"}
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.193711 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 19:35:48 crc kubenswrapper[4888]: E1201 19:35:48.194026 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da3a101a-9114-4785-974b-633d9725c610" containerName="pruner"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.194047 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="da3a101a-9114-4785-974b-633d9725c610" containerName="pruner"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.194162 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="da3a101a-9114-4785-974b-633d9725c610" containerName="pruner"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.194681 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.202831 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.203060 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.205140 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.234639 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.234809 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.335488 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.335844 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.335867 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.539828 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:48 crc kubenswrapper[4888]: I1201 19:35:48.833525 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:49 crc kubenswrapper[4888]: I1201 19:35:49.194547 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 19:35:49 crc kubenswrapper[4888]: W1201 19:35:49.221106 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1ef5a47a_6141_4276_8d5a_9b3e8620fad0.slice/crio-d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488 WatchSource:0}: Error finding container d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488: Status 404 returned error can't find the container with id d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488
Dec 01 19:35:50 crc kubenswrapper[4888]: I1201 19:35:50.037860 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:35:50 crc kubenswrapper[4888]: I1201 19:35:50.038147 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:35:50 crc kubenswrapper[4888]: I1201 19:35:50.246634 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ef5a47a-6141-4276-8d5a-9b3e8620fad0","Type":"ContainerStarted","Data":"d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488"}
Dec 01 19:35:50 crc kubenswrapper[4888]: I1201 19:35:50.489243 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-7ggph"
Dec 01 19:35:51 crc kubenswrapper[4888]: I1201 19:35:51.254969 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ef5a47a-6141-4276-8d5a-9b3e8620fad0","Type":"ContainerStarted","Data":"507e95358d913d4f850d3e1fc19ae6c0dcacdc1ebce19f3c36c322ce93aee6c5"}
Dec 01 19:35:51 crc kubenswrapper[4888]: I1201 19:35:51.269942 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.269919569 podStartE2EDuration="3.269919569s" podCreationTimestamp="2025-12-01 19:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:35:51.267670451 +0000 UTC m=+151.138700365" watchObservedRunningTime="2025-12-01 19:35:51.269919569 +0000 UTC m=+151.140949483"
Dec 01 19:35:52 crc kubenswrapper[4888]: I1201 19:35:52.265138 4888 generic.go:334] "Generic (PLEG): container finished" podID="1ef5a47a-6141-4276-8d5a-9b3e8620fad0" containerID="507e95358d913d4f850d3e1fc19ae6c0dcacdc1ebce19f3c36c322ce93aee6c5" exitCode=0
Dec 01 19:35:52 crc kubenswrapper[4888]: I1201 19:35:52.265368 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ef5a47a-6141-4276-8d5a-9b3e8620fad0","Type":"ContainerDied","Data":"507e95358d913d4f850d3e1fc19ae6c0dcacdc1ebce19f3c36c322ce93aee6c5"}
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.553224 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.740934 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir\") pod \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") "
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.741112 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access\") pod \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\" (UID: \"1ef5a47a-6141-4276-8d5a-9b3e8620fad0\") "
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.741118 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1ef5a47a-6141-4276-8d5a-9b3e8620fad0" (UID: "1ef5a47a-6141-4276-8d5a-9b3e8620fad0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.741454 4888 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.746439 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1ef5a47a-6141-4276-8d5a-9b3e8620fad0" (UID: "1ef5a47a-6141-4276-8d5a-9b3e8620fad0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:35:53 crc kubenswrapper[4888]: I1201 19:35:53.842518 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ef5a47a-6141-4276-8d5a-9b3e8620fad0-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 19:35:54 crc kubenswrapper[4888]: I1201 19:35:54.281876 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ef5a47a-6141-4276-8d5a-9b3e8620fad0","Type":"ContainerDied","Data":"d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488"}
Dec 01 19:35:54 crc kubenswrapper[4888]: I1201 19:35:54.281917 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6e3755416327bf5f942e0d23e094699b0bb50400980e1ff4d36aab06f0ba488"
Dec 01 19:35:54 crc kubenswrapper[4888]: I1201 19:35:54.282022 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 01 19:35:54 crc kubenswrapper[4888]: I1201 19:35:54.522872 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-bt5fw"
Dec 01 19:35:54 crc kubenswrapper[4888]: I1201 19:35:54.532360 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-bt5fw"
Dec 01 19:35:55 crc kubenswrapper[4888]: I1201 19:35:55.341890 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-gd6ps"
Dec 01 19:36:02 crc kubenswrapper[4888]: I1201 19:36:02.145366 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:36:02 crc kubenswrapper[4888]: I1201 19:36:02.184701 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:36:02 crc kubenswrapper[4888]: I1201 19:36:02.203947 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4a71b974-d433-46e2-904d-2d955ba74014-metrics-certs\") pod \"network-metrics-daemon-gb7nn\" (UID: \"4a71b974-d433-46e2-904d-2d955ba74014\") " pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:36:02 crc kubenswrapper[4888]: I1201 19:36:02.365773 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gb7nn"
Dec 01 19:36:11 crc kubenswrapper[4888]: E1201 19:36:11.458449 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
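[editor's note] The entries just above and below trace the kubelet's pull-retry behavior: a pull fails (ErrImagePull), the pod worker skips the sync, and subsequent attempts are throttled with ImagePullBackOff ("Back-off pulling image ..."). The Go sketch below shows only the general doubling-with-cap backoff policy behind that transition; the 10s initial delay and 5m cap are commonly cited kubelet defaults assumed here for illustration, not values read from this log.

// pullbackoff.go - sketch of a doubling-with-cap retry backoff like the
// one behind the ErrImagePull -> ImagePullBackOff transitions in this log.
// The 10s initial delay and 5m cap are assumed defaults, not log-derived.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	initial, max time.Duration
	current      time.Duration
}

// next returns the delay to wait before the next pull attempt and
// doubles the stored delay, clamping at the cap.
func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
	}
	d := b.current
	b.current *= 2
	if b.current > b.max {
		b.current = b.max
	}
	return d
}

func main() {
	b := backoff{initial: 10 * time.Second, max: 5 * time.Minute}
	for i := 1; i <= 6; i++ {
		fmt.Printf("attempt %d: back off %s\n", i, b.next()) // 10s, 20s, 40s, ... capped at 5m
	}
}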
Dec 01 19:36:11 crc kubenswrapper[4888]: E1201 19:36:11.459267 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9nd5v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-nzbl6_openshift-marketplace(78e273f1-d652-46e4-afe2-f9691d8d48e2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 19:36:11 crc kubenswrapper[4888]: E1201 19:36:11.460462 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-nzbl6" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2"
Dec 01 19:36:12 crc kubenswrapper[4888]: E1201 19:36:12.749329 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-nzbl6" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2"
Dec 01 19:36:15 crc kubenswrapper[4888]: I1201 19:36:15.127780 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rg7sz"
Dec 01 19:36:16 crc kubenswrapper[4888]: E1201 19:36:16.417139 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 01 19:36:16 crc kubenswrapper[4888]: E1201 19:36:16.417766 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ksz8f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-665fr_openshift-marketplace(12306c7a-35ef-4c7c-9d19-dc9463a224d0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 01 19:36:16 crc kubenswrapper[4888]: E1201 19:36:16.419554 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-665fr" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0"
Dec 01 19:36:16 crc kubenswrapper[4888]: I1201 19:36:16.699752 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gb7nn"]
Dec 01 19:36:16 crc kubenswrapper[4888]: W1201 19:36:16.737109 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a71b974_d433_46e2_904d_2d955ba74014.slice/crio-d32b578374efaa339c3f697139cf13e236e9eb152803d1c64df5432a76329da0 WatchSource:0}: Error finding container d32b578374efaa339c3f697139cf13e236e9eb152803d1c64df5432a76329da0: Status 404 returned error can't find the container with id d32b578374efaa339c3f697139cf13e236e9eb152803d1c64df5432a76329da0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.422575 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerStarted","Data":"783674afee97de4c406e99a603a573754d7fa9fbe208660490926a28b08c1134"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.426387 4888 generic.go:334] "Generic (PLEG): container finished" podID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerID="92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c" exitCode=0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.426447 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerDied","Data":"92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.429587 4888 generic.go:334] "Generic (PLEG): container finished" podID="f9a74618-f827-452a-988f-c7f314143925" containerID="11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0" exitCode=0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.429638 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerDied","Data":"11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.432540 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" event={"ID":"4a71b974-d433-46e2-904d-2d955ba74014","Type":"ContainerStarted","Data":"09dbd71084607a9b99eb0a8a64adc1bd6e7d13aaf819b6e96fc88232394d718f"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.432566 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" event={"ID":"4a71b974-d433-46e2-904d-2d955ba74014","Type":"ContainerStarted","Data":"d32b578374efaa339c3f697139cf13e236e9eb152803d1c64df5432a76329da0"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.435336 4888 generic.go:334] "Generic (PLEG): container finished" podID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerID="a12233db1b9431f2bed4faf4093611dd0f980b290cca5a2ee4c57181cb4f0679" exitCode=0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.435446 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerDied","Data":"a12233db1b9431f2bed4faf4093611dd0f980b290cca5a2ee4c57181cb4f0679"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.440916 4888 generic.go:334] "Generic (PLEG): container finished" podID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerID="35b474013b7b7eb2b430ebb91eac8baa23ae41e1f2e07404b39b1fe43e065ed7" exitCode=0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.440972 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerDied","Data":"35b474013b7b7eb2b430ebb91eac8baa23ae41e1f2e07404b39b1fe43e065ed7"}
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.443979 4888 generic.go:334] "Generic (PLEG): container finished" podID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerID="136af63f3658007e2aff14d42428bb73aeb93c79c28b715d8e06f48a6d3e0b08" exitCode=0
Dec 01 19:36:17 crc kubenswrapper[4888]: I1201 19:36:17.444031 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerDied","Data":"136af63f3658007e2aff14d42428bb73aeb93c79c28b715d8e06f48a6d3e0b08"}
Dec 01 19:36:17 crc kubenswrapper[4888]: E1201 19:36:17.446795 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-665fr" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0"
Dec 01 19:36:18 crc kubenswrapper[4888]: I1201 19:36:18.453098 4888 generic.go:334] "Generic (PLEG): container finished" podID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerID="783674afee97de4c406e99a603a573754d7fa9fbe208660490926a28b08c1134" exitCode=0
Dec 01 19:36:18 crc kubenswrapper[4888]: I1201 19:36:18.459889 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerDied","Data":"783674afee97de4c406e99a603a573754d7fa9fbe208660490926a28b08c1134"}
Dec 01 19:36:18 crc kubenswrapper[4888]: I1201 19:36:18.459936 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gb7nn" event={"ID":"4a71b974-d433-46e2-904d-2d955ba74014","Type":"ContainerStarted","Data":"3636ac5671827603ac6b159c03c92b0e9fd190d967058deec3484c629140d6ee"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.463316 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerStarted","Data":"d4b34ea579c316de84216252b88ef654b2e75e28e56be88dcab33ba7f69bd665"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.465298 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerStarted","Data":"b44acb3e3124ec1da759a6f2a729c71e4bcf343db89566a5503e1f926fd32572"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.472754 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerStarted","Data":"aad7c55e6a61ec3e5533234bd3a580bba65c0081073e50854948479f22174ec5"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.481961 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerStarted","Data":"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.484954 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerStarted","Data":"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7"}
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.487463 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gb7nn" podStartSLOduration=159.487451791 podStartE2EDuration="2m39.487451791s" podCreationTimestamp="2025-12-01 19:33:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:36:18.493835138 +0000 UTC m=+178.364865052" watchObservedRunningTime="2025-12-01 19:36:19.487451791 +0000 UTC m=+179.358481705"
Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.488368 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jgt74" podStartSLOduration=2.531281499 podStartE2EDuration="36.488363339s" podCreationTimestamp="2025-12-01 19:35:43 +0000 UTC" firstStartedPulling="2025-12-01 19:35:45.05657317 +0000 UTC m=+144.927603084" lastFinishedPulling="2025-12-01 19:36:19.01365501 +0000 UTC m=+178.884684924" observedRunningTime="2025-12-01 19:36:19.486963066 +0000 UTC
m=+179.357992970" watchObservedRunningTime="2025-12-01 19:36:19.488363339 +0000 UTC m=+179.359393253" Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.507977 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hqn42" podStartSLOduration=2.3751411190000002 podStartE2EDuration="38.50795918s" podCreationTimestamp="2025-12-01 19:35:41 +0000 UTC" firstStartedPulling="2025-12-01 19:35:43.010053672 +0000 UTC m=+142.881083576" lastFinishedPulling="2025-12-01 19:36:19.142871723 +0000 UTC m=+179.013901637" observedRunningTime="2025-12-01 19:36:19.504444764 +0000 UTC m=+179.375474688" watchObservedRunningTime="2025-12-01 19:36:19.50795918 +0000 UTC m=+179.378989094" Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.523292 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vt9c7" podStartSLOduration=2.472399868 podStartE2EDuration="38.523274653s" podCreationTimestamp="2025-12-01 19:35:41 +0000 UTC" firstStartedPulling="2025-12-01 19:35:43.011996361 +0000 UTC m=+142.883026275" lastFinishedPulling="2025-12-01 19:36:19.062871156 +0000 UTC m=+178.933901060" observedRunningTime="2025-12-01 19:36:19.521657334 +0000 UTC m=+179.392687248" watchObservedRunningTime="2025-12-01 19:36:19.523274653 +0000 UTC m=+179.394304567" Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.546966 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ppf8p" podStartSLOduration=2.36167117 podStartE2EDuration="37.546944918s" podCreationTimestamp="2025-12-01 19:35:42 +0000 UTC" firstStartedPulling="2025-12-01 19:35:44.046671745 +0000 UTC m=+143.917701659" lastFinishedPulling="2025-12-01 19:36:19.231945493 +0000 UTC m=+179.102975407" observedRunningTime="2025-12-01 19:36:19.544484654 +0000 UTC m=+179.415514578" watchObservedRunningTime="2025-12-01 19:36:19.546944918 +0000 UTC m=+179.417974832" Dec 01 19:36:19 crc kubenswrapper[4888]: I1201 19:36:19.567512 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-595jt" podStartSLOduration=2.5576434409999997 podStartE2EDuration="35.567494759s" podCreationTimestamp="2025-12-01 19:35:44 +0000 UTC" firstStartedPulling="2025-12-01 19:35:46.074041443 +0000 UTC m=+145.945071357" lastFinishedPulling="2025-12-01 19:36:19.083892761 +0000 UTC m=+178.954922675" observedRunningTime="2025-12-01 19:36:19.565953592 +0000 UTC m=+179.436983506" watchObservedRunningTime="2025-12-01 19:36:19.567494759 +0000 UTC m=+179.438524673" Dec 01 19:36:20 crc kubenswrapper[4888]: I1201 19:36:20.038068 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:36:20 crc kubenswrapper[4888]: I1201 19:36:20.038417 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:36:20 crc kubenswrapper[4888]: I1201 19:36:20.503377 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" 
event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerStarted","Data":"13f08448097b2ad1ca9941675e877959aa8010a334977be36f6d59d46120c6ea"} Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.454744 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.454801 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.560826 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.585658 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pspwm" podStartSLOduration=4.293197943 podStartE2EDuration="37.585635858s" podCreationTimestamp="2025-12-01 19:35:44 +0000 UTC" firstStartedPulling="2025-12-01 19:35:46.069700091 +0000 UTC m=+145.940729995" lastFinishedPulling="2025-12-01 19:36:19.362137986 +0000 UTC m=+179.233167910" observedRunningTime="2025-12-01 19:36:20.528500457 +0000 UTC m=+180.399530371" watchObservedRunningTime="2025-12-01 19:36:21.585635858 +0000 UTC m=+181.456665772" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.667629 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.667699 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.710209 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.989104 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 19:36:21 crc kubenswrapper[4888]: E1201 19:36:21.989728 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ef5a47a-6141-4276-8d5a-9b3e8620fad0" containerName="pruner" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.989746 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ef5a47a-6141-4276-8d5a-9b3e8620fad0" containerName="pruner" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.989851 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ef5a47a-6141-4276-8d5a-9b3e8620fad0" containerName="pruner" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.990411 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.992111 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.992693 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 19:36:21 crc kubenswrapper[4888]: I1201 19:36:21.997971 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.059653 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.059725 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.161074 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.161176 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.161584 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.180775 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.305392 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:22 crc kubenswrapper[4888]: I1201 19:36:22.704133 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 19:36:22 crc kubenswrapper[4888]: W1201 19:36:22.705272 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1cf9f0b0_866a_4293_ad9a_ae7a561a73e6.slice/crio-c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95 WatchSource:0}: Error finding container c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95: Status 404 returned error can't find the container with id c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95 Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.163489 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.163812 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.206432 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.521327 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6","Type":"ContainerStarted","Data":"c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95"} Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.632530 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.633448 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.676438 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:23 crc kubenswrapper[4888]: I1201 19:36:23.800600 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.393667 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.394428 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.532348 4888 generic.go:334] "Generic (PLEG): container finished" podID="1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" containerID="9e6366d97b126d106255a220731ccf4c71397d0f95dc16105d391e0f76b743d3" exitCode=0 Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.532460 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6","Type":"ContainerDied","Data":"9e6366d97b126d106255a220731ccf4c71397d0f95dc16105d391e0f76b743d3"} Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.584670 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:24 crc 
kubenswrapper[4888]: I1201 19:36:24.768076 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.768143 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:24 crc kubenswrapper[4888]: I1201 19:36:24.827592 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.431725 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pspwm" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="registry-server" probeResult="failure" output=< Dec 01 19:36:25 crc kubenswrapper[4888]: timeout: failed to connect service ":50051" within 1s Dec 01 19:36:25 crc kubenswrapper[4888]: > Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.581390 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.835943 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.905288 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access\") pod \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.905422 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir\") pod \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\" (UID: \"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6\") " Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.905557 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" (UID: "1cf9f0b0-866a-4293-ad9a-ae7a561a73e6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.905759 4888 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:25 crc kubenswrapper[4888]: I1201 19:36:25.911706 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" (UID: "1cf9f0b0-866a-4293-ad9a-ae7a561a73e6"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:26 crc kubenswrapper[4888]: I1201 19:36:26.009048 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1cf9f0b0-866a-4293-ad9a-ae7a561a73e6-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:26 crc kubenswrapper[4888]: I1201 19:36:26.543122 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1cf9f0b0-866a-4293-ad9a-ae7a561a73e6","Type":"ContainerDied","Data":"c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95"} Dec 01 19:36:26 crc kubenswrapper[4888]: I1201 19:36:26.543174 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c72e16ac4d998aa0cb5715263c37f17dfef5bc10adaaca3bc4c09845d026fd95" Dec 01 19:36:26 crc kubenswrapper[4888]: I1201 19:36:26.543230 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 19:36:26 crc kubenswrapper[4888]: I1201 19:36:26.577275 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 19:36:27 crc kubenswrapper[4888]: I1201 19:36:27.647061 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"] Dec 01 19:36:27 crc kubenswrapper[4888]: I1201 19:36:27.647306 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jgt74" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="registry-server" containerID="cri-o://d4b34ea579c316de84216252b88ef654b2e75e28e56be88dcab33ba7f69bd665" gracePeriod=2 Dec 01 19:36:28 crc kubenswrapper[4888]: I1201 19:36:28.556449 4888 generic.go:334] "Generic (PLEG): container finished" podID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerID="b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6" exitCode=0 Dec 01 19:36:28 crc kubenswrapper[4888]: I1201 19:36:28.556617 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerDied","Data":"b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6"} Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.566102 4888 generic.go:334] "Generic (PLEG): container finished" podID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerID="d4b34ea579c316de84216252b88ef654b2e75e28e56be88dcab33ba7f69bd665" exitCode=0 Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.566961 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerDied","Data":"d4b34ea579c316de84216252b88ef654b2e75e28e56be88dcab33ba7f69bd665"} Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.851966 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.852279 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-595jt" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="registry-server" containerID="cri-o://b44acb3e3124ec1da759a6f2a729c71e4bcf343db89566a5503e1f926fd32572" gracePeriod=2 Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 
19:36:29.985128 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 19:36:29 crc kubenswrapper[4888]: E1201 19:36:29.985521 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" containerName="pruner" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.985540 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" containerName="pruner" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.985648 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cf9f0b0-866a-4293-ad9a-ae7a561a73e6" containerName="pruner" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.986178 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.988720 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.988796 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 19:36:29 crc kubenswrapper[4888]: I1201 19:36:29.997931 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.033501 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.052088 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.052140 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.052157 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153203 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities\") pod \"285e4764-a232-44ab-a2ee-82b1bd2c154c\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153339 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content\") pod \"285e4764-a232-44ab-a2ee-82b1bd2c154c\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153407 4888 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-955vh\" (UniqueName: \"kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh\") pod \"285e4764-a232-44ab-a2ee-82b1bd2c154c\" (UID: \"285e4764-a232-44ab-a2ee-82b1bd2c154c\") " Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153636 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153668 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153687 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153721 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.153749 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.154387 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities" (OuterVolumeSpecName: "utilities") pod "285e4764-a232-44ab-a2ee-82b1bd2c154c" (UID: "285e4764-a232-44ab-a2ee-82b1bd2c154c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.159858 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh" (OuterVolumeSpecName: "kube-api-access-955vh") pod "285e4764-a232-44ab-a2ee-82b1bd2c154c" (UID: "285e4764-a232-44ab-a2ee-82b1bd2c154c"). InnerVolumeSpecName "kube-api-access-955vh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.171038 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access\") pod \"installer-9-crc\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.182178 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "285e4764-a232-44ab-a2ee-82b1bd2c154c" (UID: "285e4764-a232-44ab-a2ee-82b1bd2c154c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.255676 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.255749 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-955vh\" (UniqueName: \"kubernetes.io/projected/285e4764-a232-44ab-a2ee-82b1bd2c154c-kube-api-access-955vh\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.255765 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/285e4764-a232-44ab-a2ee-82b1bd2c154c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.330065 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.572250 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jgt74" event={"ID":"285e4764-a232-44ab-a2ee-82b1bd2c154c","Type":"ContainerDied","Data":"f9e2846c7292e5c7eb20a2c2d59a2279231670f06f567ad9b82ad61e1502dd22"} Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.572301 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jgt74" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.572306 4888 scope.go:117] "RemoveContainer" containerID="d4b34ea579c316de84216252b88ef654b2e75e28e56be88dcab33ba7f69bd665" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.592384 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"] Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.597237 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jgt74"] Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.598272 4888 scope.go:117] "RemoveContainer" containerID="a12233db1b9431f2bed4faf4093611dd0f980b290cca5a2ee4c57181cb4f0679" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.612043 4888 scope.go:117] "RemoveContainer" containerID="f5a04cf9eb02d25b9f82191ae66a579852a3ae2035cf46fdf838b062c6a94ec3" Dec 01 19:36:30 crc kubenswrapper[4888]: I1201 19:36:30.732696 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 19:36:30 crc kubenswrapper[4888]: W1201 19:36:30.737123 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf3df77ab_d231_4b67_8813_f1afa968973d.slice/crio-40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21 WatchSource:0}: Error finding container 40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21: Status 404 returned error can't find the container with id 40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21 Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.505744 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.580948 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f3df77ab-d231-4b67-8813-f1afa968973d","Type":"ContainerStarted","Data":"4bb20c102a8ef1c5574beee6ba4ffda401c07b2eb1091bdff4cf520e73373521"} Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.580997 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f3df77ab-d231-4b67-8813-f1afa968973d","Type":"ContainerStarted","Data":"40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21"} Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.584404 4888 generic.go:334] "Generic (PLEG): container finished" podID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerID="b44acb3e3124ec1da759a6f2a729c71e4bcf343db89566a5503e1f926fd32572" exitCode=0 Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.584441 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerDied","Data":"b44acb3e3124ec1da759a6f2a729c71e4bcf343db89566a5503e1f926fd32572"} Dec 01 19:36:31 crc kubenswrapper[4888]: I1201 19:36:31.708392 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.240221 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.384040 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities\") pod \"c7792e13-fb99-4efb-aa5e-d80c70835269\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.384097 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content\") pod \"c7792e13-fb99-4efb-aa5e-d80c70835269\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.384287 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrwhl\" (UniqueName: \"kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl\") pod \"c7792e13-fb99-4efb-aa5e-d80c70835269\" (UID: \"c7792e13-fb99-4efb-aa5e-d80c70835269\") " Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.385232 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities" (OuterVolumeSpecName: "utilities") pod "c7792e13-fb99-4efb-aa5e-d80c70835269" (UID: "c7792e13-fb99-4efb-aa5e-d80c70835269"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.393649 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl" (OuterVolumeSpecName: "kube-api-access-vrwhl") pod "c7792e13-fb99-4efb-aa5e-d80c70835269" (UID: "c7792e13-fb99-4efb-aa5e-d80c70835269"). InnerVolumeSpecName "kube-api-access-vrwhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.458374 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" path="/var/lib/kubelet/pods/285e4764-a232-44ab-a2ee-82b1bd2c154c/volumes" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.485415 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.485457 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrwhl\" (UniqueName: \"kubernetes.io/projected/c7792e13-fb99-4efb-aa5e-d80c70835269-kube-api-access-vrwhl\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.501515 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c7792e13-fb99-4efb-aa5e-d80c70835269" (UID: "c7792e13-fb99-4efb-aa5e-d80c70835269"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.586394 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c7792e13-fb99-4efb-aa5e-d80c70835269-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.592724 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerStarted","Data":"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb"} Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.594895 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerStarted","Data":"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83"} Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.597723 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-595jt" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.600262 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-595jt" event={"ID":"c7792e13-fb99-4efb-aa5e-d80c70835269","Type":"ContainerDied","Data":"599cb96ec33d50ce74946313f5ffac1aece058a98e7839b9c13bfc20165215a1"} Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.600347 4888 scope.go:117] "RemoveContainer" containerID="b44acb3e3124ec1da759a6f2a729c71e4bcf343db89566a5503e1f926fd32572" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.617431 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nzbl6" podStartSLOduration=2.853468608 podStartE2EDuration="51.617404611s" podCreationTimestamp="2025-12-01 19:35:41 +0000 UTC" firstStartedPulling="2025-12-01 19:35:43.015642331 +0000 UTC m=+142.886672245" lastFinishedPulling="2025-12-01 19:36:31.779578324 +0000 UTC m=+191.650608248" observedRunningTime="2025-12-01 19:36:32.614992448 +0000 UTC m=+192.486022352" watchObservedRunningTime="2025-12-01 19:36:32.617404611 +0000 UTC m=+192.488434545" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.627211 4888 scope.go:117] "RemoveContainer" containerID="35b474013b7b7eb2b430ebb91eac8baa23ae41e1f2e07404b39b1fe43e065ed7" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.653715 4888 scope.go:117] "RemoveContainer" containerID="1a6856137e8a64f5e85b3f8a18224a1a9cdd82917948de43c6b2192b1024cdca" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.660421 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.660391909 podStartE2EDuration="3.660391909s" podCreationTimestamp="2025-12-01 19:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:36:32.656237934 +0000 UTC m=+192.527267848" watchObservedRunningTime="2025-12-01 19:36:32.660391909 +0000 UTC m=+192.531421823" Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.684601 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:36:32 crc kubenswrapper[4888]: I1201 19:36:32.693243 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/redhat-operators-595jt"] Dec 01 19:36:33 crc kubenswrapper[4888]: I1201 19:36:33.203738 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:36:33 crc kubenswrapper[4888]: I1201 19:36:33.604622 4888 generic.go:334] "Generic (PLEG): container finished" podID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerID="b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83" exitCode=0 Dec 01 19:36:33 crc kubenswrapper[4888]: I1201 19:36:33.604699 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerDied","Data":"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83"} Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.437695 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.460665 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" path="/var/lib/kubelet/pods/c7792e13-fb99-4efb-aa5e-d80c70835269/volumes" Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.484970 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.614463 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerStarted","Data":"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af"} Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.632715 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-665fr" podStartSLOduration=3.268819959 podStartE2EDuration="54.632698965s" podCreationTimestamp="2025-12-01 19:35:40 +0000 UTC" firstStartedPulling="2025-12-01 19:35:43.008473805 +0000 UTC m=+142.879503719" lastFinishedPulling="2025-12-01 19:36:34.372352811 +0000 UTC m=+194.243382725" observedRunningTime="2025-12-01 19:36:34.630382425 +0000 UTC m=+194.501412339" watchObservedRunningTime="2025-12-01 19:36:34.632698965 +0000 UTC m=+194.503728879" Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.847613 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:36:34 crc kubenswrapper[4888]: I1201 19:36:34.848060 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vt9c7" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="registry-server" containerID="cri-o://7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999" gracePeriod=2 Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.220536 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.325671 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9fkq\" (UniqueName: \"kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq\") pod \"f9a74618-f827-452a-988f-c7f314143925\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.325705 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities\") pod \"f9a74618-f827-452a-988f-c7f314143925\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.325795 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content\") pod \"f9a74618-f827-452a-988f-c7f314143925\" (UID: \"f9a74618-f827-452a-988f-c7f314143925\") " Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.326906 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities" (OuterVolumeSpecName: "utilities") pod "f9a74618-f827-452a-988f-c7f314143925" (UID: "f9a74618-f827-452a-988f-c7f314143925"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.332147 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq" (OuterVolumeSpecName: "kube-api-access-k9fkq") pod "f9a74618-f827-452a-988f-c7f314143925" (UID: "f9a74618-f827-452a-988f-c7f314143925"). InnerVolumeSpecName "kube-api-access-k9fkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.382228 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9a74618-f827-452a-988f-c7f314143925" (UID: "f9a74618-f827-452a-988f-c7f314143925"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.427223 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9fkq\" (UniqueName: \"kubernetes.io/projected/f9a74618-f827-452a-988f-c7f314143925-kube-api-access-k9fkq\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.427263 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.427282 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a74618-f827-452a-988f-c7f314143925-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.622088 4888 generic.go:334] "Generic (PLEG): container finished" podID="f9a74618-f827-452a-988f-c7f314143925" containerID="7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999" exitCode=0 Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.622807 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerDied","Data":"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999"} Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.622927 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vt9c7" event={"ID":"f9a74618-f827-452a-988f-c7f314143925","Type":"ContainerDied","Data":"8fbf10f79519da7d280683e5f8cbab5e53ba9fb7515ad4b753d84bc983558551"} Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.623029 4888 scope.go:117] "RemoveContainer" containerID="7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.623323 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vt9c7" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.649265 4888 scope.go:117] "RemoveContainer" containerID="11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.660023 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.666477 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vt9c7"] Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.684144 4888 scope.go:117] "RemoveContainer" containerID="c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.703081 4888 scope.go:117] "RemoveContainer" containerID="7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999" Dec 01 19:36:35 crc kubenswrapper[4888]: E1201 19:36:35.703551 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999\": container with ID starting with 7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999 not found: ID does not exist" containerID="7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.703587 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999"} err="failed to get container status \"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999\": rpc error: code = NotFound desc = could not find container \"7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999\": container with ID starting with 7a7902f8ceb6486611aafe6f90788ed6fc50fc64133b9b53ca7bbcce97c2c999 not found: ID does not exist" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.703634 4888 scope.go:117] "RemoveContainer" containerID="11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0" Dec 01 19:36:35 crc kubenswrapper[4888]: E1201 19:36:35.704345 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0\": container with ID starting with 11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0 not found: ID does not exist" containerID="11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.704402 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0"} err="failed to get container status \"11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0\": rpc error: code = NotFound desc = could not find container \"11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0\": container with ID starting with 11d263e30fa1d645da8c5bcd281e1d91be3d3190c29880d26ad62f3bdbfb9df0 not found: ID does not exist" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.704436 4888 scope.go:117] "RemoveContainer" containerID="c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294" Dec 01 19:36:35 crc kubenswrapper[4888]: E1201 19:36:35.704774 4888 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294\": container with ID starting with c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294 not found: ID does not exist" containerID="c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294" Dec 01 19:36:35 crc kubenswrapper[4888]: I1201 19:36:35.704810 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294"} err="failed to get container status \"c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294\": rpc error: code = NotFound desc = could not find container \"c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294\": container with ID starting with c27a84cb6cd31e22151200da278e4a948a84238d1fd8e76ab55b3f3a9ed55294 not found: ID does not exist" Dec 01 19:36:36 crc kubenswrapper[4888]: I1201 19:36:36.457811 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a74618-f827-452a-988f-c7f314143925" path="/var/lib/kubelet/pods/f9a74618-f827-452a-988f-c7f314143925/volumes" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.235720 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.236424 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.276945 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.685725 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.836820 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.836873 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:41 crc kubenswrapper[4888]: I1201 19:36:41.875343 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:42 crc kubenswrapper[4888]: I1201 19:36:42.709501 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.079463 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.080004 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nzbl6" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="registry-server" containerID="cri-o://622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb" gracePeriod=2 Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.463074 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.656588 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities\") pod \"78e273f1-d652-46e4-afe2-f9691d8d48e2\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.656823 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nd5v\" (UniqueName: \"kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v\") pod \"78e273f1-d652-46e4-afe2-f9691d8d48e2\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.658028 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities" (OuterVolumeSpecName: "utilities") pod "78e273f1-d652-46e4-afe2-f9691d8d48e2" (UID: "78e273f1-d652-46e4-afe2-f9691d8d48e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.658301 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content\") pod \"78e273f1-d652-46e4-afe2-f9691d8d48e2\" (UID: \"78e273f1-d652-46e4-afe2-f9691d8d48e2\") " Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.658535 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.662301 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v" (OuterVolumeSpecName: "kube-api-access-9nd5v") pod "78e273f1-d652-46e4-afe2-f9691d8d48e2" (UID: "78e273f1-d652-46e4-afe2-f9691d8d48e2"). InnerVolumeSpecName "kube-api-access-9nd5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.677987 4888 generic.go:334] "Generic (PLEG): container finished" podID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerID="622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb" exitCode=0 Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.678034 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerDied","Data":"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb"} Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.678062 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nzbl6" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.678072 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzbl6" event={"ID":"78e273f1-d652-46e4-afe2-f9691d8d48e2","Type":"ContainerDied","Data":"ac629657e9c13ab59dc6f33d154282784ef6b185acce102d67fb51d230780a74"} Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.678092 4888 scope.go:117] "RemoveContainer" containerID="622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.697772 4888 scope.go:117] "RemoveContainer" containerID="b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.717444 4888 scope.go:117] "RemoveContainer" containerID="c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.718535 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78e273f1-d652-46e4-afe2-f9691d8d48e2" (UID: "78e273f1-d652-46e4-afe2-f9691d8d48e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.734880 4888 scope.go:117] "RemoveContainer" containerID="622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb" Dec 01 19:36:45 crc kubenswrapper[4888]: E1201 19:36:45.735616 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb\": container with ID starting with 622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb not found: ID does not exist" containerID="622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.735675 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb"} err="failed to get container status \"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb\": rpc error: code = NotFound desc = could not find container \"622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb\": container with ID starting with 622a16d6d951f141dd733464d50d9bed8b4f530e49a64bc3a6bbfe60454caefb not found: ID does not exist" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.735707 4888 scope.go:117] "RemoveContainer" containerID="b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6" Dec 01 19:36:45 crc kubenswrapper[4888]: E1201 19:36:45.736042 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6\": container with ID starting with b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6 not found: ID does not exist" containerID="b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.736179 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6"} err="failed to get container status 
\"b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6\": rpc error: code = NotFound desc = could not find container \"b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6\": container with ID starting with b20aa2b3767196f559608c3267dfc33955469caa8110d8b03c0f4a1d20e4bff6 not found: ID does not exist" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.736328 4888 scope.go:117] "RemoveContainer" containerID="c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31" Dec 01 19:36:45 crc kubenswrapper[4888]: E1201 19:36:45.736715 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31\": container with ID starting with c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31 not found: ID does not exist" containerID="c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.736831 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31"} err="failed to get container status \"c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31\": rpc error: code = NotFound desc = could not find container \"c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31\": container with ID starting with c283e0c6a8e0907ece9fd9898b596b02877e294f627f478d612969d21a095e31 not found: ID does not exist" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.759127 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nd5v\" (UniqueName: \"kubernetes.io/projected/78e273f1-d652-46e4-afe2-f9691d8d48e2-kube-api-access-9nd5v\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:45 crc kubenswrapper[4888]: I1201 19:36:45.759493 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78e273f1-d652-46e4-afe2-f9691d8d48e2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:46 crc kubenswrapper[4888]: I1201 19:36:46.015419 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:36:46 crc kubenswrapper[4888]: I1201 19:36:46.018853 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nzbl6"] Dec 01 19:36:46 crc kubenswrapper[4888]: I1201 19:36:46.457562 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" path="/var/lib/kubelet/pods/78e273f1-d652-46e4-afe2-f9691d8d48e2/volumes" Dec 01 19:36:48 crc kubenswrapper[4888]: I1201 19:36:48.823633 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerName="oauth-openshift" containerID="cri-o://da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657" gracePeriod=15 Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.233875 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315483 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315527 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315732 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315816 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jh7x\" (UniqueName: \"kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315853 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315896 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315952 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.315996 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316030 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316064 4888 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316104 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316132 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316155 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316195 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection\") pod \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\" (UID: \"456a034d-a3c3-4cae-b0a1-4f5d0569ec08\") " Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316801 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.316821 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.317122 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.317176 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.317232 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.323175 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.324088 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.324221 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.324434 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x" (OuterVolumeSpecName: "kube-api-access-8jh7x") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "kube-api-access-8jh7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.324946 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.325821 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.325953 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.326721 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.328929 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "456a034d-a3c3-4cae-b0a1-4f5d0569ec08" (UID: "456a034d-a3c3-4cae-b0a1-4f5d0569ec08"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416873 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416912 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416922 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416934 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416945 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416956 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416965 4888 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.416973 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417000 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417009 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417019 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417027 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jh7x\" (UniqueName: 
\"kubernetes.io/projected/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-kube-api-access-8jh7x\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417035 4888 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.417043 4888 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/456a034d-a3c3-4cae-b0a1-4f5d0569ec08-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.704247 4888 generic.go:334] "Generic (PLEG): container finished" podID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerID="da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657" exitCode=0 Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.704297 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" event={"ID":"456a034d-a3c3-4cae-b0a1-4f5d0569ec08","Type":"ContainerDied","Data":"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657"} Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.704325 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" event={"ID":"456a034d-a3c3-4cae-b0a1-4f5d0569ec08","Type":"ContainerDied","Data":"ba7c3d89b0c1d95b7fdb2ae64b46e200b780fe899c08e5a1031df631a4bdf44b"} Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.704330 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5ksc9" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.704342 4888 scope.go:117] "RemoveContainer" containerID="da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.722096 4888 scope.go:117] "RemoveContainer" containerID="da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657" Dec 01 19:36:49 crc kubenswrapper[4888]: E1201 19:36:49.722695 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657\": container with ID starting with da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657 not found: ID does not exist" containerID="da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.722776 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657"} err="failed to get container status \"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657\": rpc error: code = NotFound desc = could not find container \"da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657\": container with ID starting with da287d5a656ffd6e5ba2dfb5389e1aa930dab782760257da21eb146a6dba2657 not found: ID does not exist" Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.734643 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:36:49 crc kubenswrapper[4888]: I1201 19:36:49.740395 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-authentication/oauth-openshift-558db77b4-5ksc9"] Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.037573 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.037652 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.037713 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.038357 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.038417 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252" gracePeriod=600 Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.466725 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" path="/var/lib/kubelet/pods/456a034d-a3c3-4cae-b0a1-4f5d0569ec08/volumes" Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.713960 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252" exitCode=0 Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.714018 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252"} Dec 01 19:36:50 crc kubenswrapper[4888]: I1201 19:36:50.714617 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055"} Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.669075 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6dc8875d65-pnq8z"] Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.670752 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.670850 4888 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.670928 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671042 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671122 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671214 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671303 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671378 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671453 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerName="oauth-openshift" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671537 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerName="oauth-openshift" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671628 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671712 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671799 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.671886 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.671968 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672045 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.672132 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672225 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.672304 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672378 4888 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.672463 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672536 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="extract-utilities" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.672621 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672699 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: E1201 19:36:52.672784 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.672859 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="extract-content" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.673061 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="285e4764-a232-44ab-a2ee-82b1bd2c154c" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.673149 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="456a034d-a3c3-4cae-b0a1-4f5d0569ec08" containerName="oauth-openshift" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.673263 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7792e13-fb99-4efb-aa5e-d80c70835269" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.673341 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e273f1-d652-46e4-afe2-f9691d8d48e2" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.673415 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a74618-f827-452a-988f-c7f314143925" containerName="registry-server" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.674131 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.675755 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.676830 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.676926 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.677506 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.677532 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.677711 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.677929 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.678102 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.678130 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.678280 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.678859 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.679911 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.683644 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.693745 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6dc8875d65-pnq8z"] Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.693896 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.694340 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862114 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " 
pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862213 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862242 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862347 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862388 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862412 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862440 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862465 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862493 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862590 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-audit-policies\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862622 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3c0e622b-f233-405b-9080-cbd850d756b9-audit-dir\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862642 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn76n\" (UniqueName: \"kubernetes.io/projected/3c0e622b-f233-405b-9080-cbd850d756b9-kube-api-access-zn76n\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862663 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.862686 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-session\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963422 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963460 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963490 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963519 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963533 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963549 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963627 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963759 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.963782 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-audit-policies\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964328 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3c0e622b-f233-405b-9080-cbd850d756b9-audit-dir\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964437 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/3c0e622b-f233-405b-9080-cbd850d756b9-audit-dir\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964795 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn76n\" (UniqueName: \"kubernetes.io/projected/3c0e622b-f233-405b-9080-cbd850d756b9-kube-api-access-zn76n\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964828 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964875 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-session\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.964906 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.965308 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-audit-policies\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.965368 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.965453 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-service-ca\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.965997 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.969674 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-session\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.969690 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.970106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-login\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.970097 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-error\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.970321 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.970644 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.971019 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.972598 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3c0e622b-f233-405b-9080-cbd850d756b9-v4-0-config-system-router-certs\") pod \"oauth-openshift-6dc8875d65-pnq8z\" 
(UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.981175 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn76n\" (UniqueName: \"kubernetes.io/projected/3c0e622b-f233-405b-9080-cbd850d756b9-kube-api-access-zn76n\") pod \"oauth-openshift-6dc8875d65-pnq8z\" (UID: \"3c0e622b-f233-405b-9080-cbd850d756b9\") " pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:52 crc kubenswrapper[4888]: I1201 19:36:52.990395 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:53 crc kubenswrapper[4888]: I1201 19:36:53.375523 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6dc8875d65-pnq8z"] Dec 01 19:36:53 crc kubenswrapper[4888]: I1201 19:36:53.729166 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" event={"ID":"3c0e622b-f233-405b-9080-cbd850d756b9","Type":"ContainerStarted","Data":"e7bef8b5a5c4306c7e00b5d366334408205dd35e2c5184bb2cd5676c687cb487"} Dec 01 19:36:53 crc kubenswrapper[4888]: I1201 19:36:53.729603 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" event={"ID":"3c0e622b-f233-405b-9080-cbd850d756b9","Type":"ContainerStarted","Data":"937c737708d7d26c303234c1eab8a21db541d515ff8f7207eecb47badd6b6f7b"} Dec 01 19:36:53 crc kubenswrapper[4888]: I1201 19:36:53.729627 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:36:53 crc kubenswrapper[4888]: I1201 19:36:53.758342 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" podStartSLOduration=30.758305615 podStartE2EDuration="30.758305615s" podCreationTimestamp="2025-12-01 19:36:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:36:53.750354788 +0000 UTC m=+213.621384722" watchObservedRunningTime="2025-12-01 19:36:53.758305615 +0000 UTC m=+213.629335549" Dec 01 19:36:54 crc kubenswrapper[4888]: I1201 19:36:54.018006 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6dc8875d65-pnq8z" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.227764 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-665fr"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.229091 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-665fr" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="registry-server" containerID="cri-o://0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af" gracePeriod=30 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.240482 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hqn42"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.240769 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hqn42" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" 
containerName="registry-server" containerID="cri-o://653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7" gracePeriod=30 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.254145 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-t425v"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.254411 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerName="marketplace-operator" containerID="cri-o://70d02ffd313add1ebce42c8ad31e9c7cb315e752b2a8c867b4ae09a3af7e4bb3" gracePeriod=30 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.270318 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf8p"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.270712 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ppf8p" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="registry-server" containerID="cri-o://aad7c55e6a61ec3e5533234bd3a580bba65c0081073e50854948479f22174ec5" gracePeriod=30 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.273221 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w5r85"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.274252 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.279510 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pspwm"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.279791 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pspwm" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="registry-server" containerID="cri-o://13f08448097b2ad1ca9941675e877959aa8010a334977be36f6d59d46120c6ea" gracePeriod=30 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.294682 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w5r85"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.363154 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.363468 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwlxm\" (UniqueName: \"kubernetes.io/projected/08dcfb84-e006-4100-8a3a-26dc77a68e61-kube-api-access-lwlxm\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.363568 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.465515 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.465903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.465929 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwlxm\" (UniqueName: \"kubernetes.io/projected/08dcfb84-e006-4100-8a3a-26dc77a68e61-kube-api-access-lwlxm\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.467749 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.479988 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/08dcfb84-e006-4100-8a3a-26dc77a68e61-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.485025 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwlxm\" (UniqueName: \"kubernetes.io/projected/08dcfb84-e006-4100-8a3a-26dc77a68e61-kube-api-access-lwlxm\") pod \"marketplace-operator-79b997595-w5r85\" (UID: \"08dcfb84-e006-4100-8a3a-26dc77a68e61\") " pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.597752 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.728862 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.734266 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.811372 4888 generic.go:334] "Generic (PLEG): container finished" podID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerID="aad7c55e6a61ec3e5533234bd3a580bba65c0081073e50854948479f22174ec5" exitCode=0 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.811457 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerDied","Data":"aad7c55e6a61ec3e5533234bd3a580bba65c0081073e50854948479f22174ec5"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.815600 4888 generic.go:334] "Generic (PLEG): container finished" podID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerID="0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af" exitCode=0 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.815713 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-665fr" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.816305 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerDied","Data":"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.816337 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-665fr" event={"ID":"12306c7a-35ef-4c7c-9d19-dc9463a224d0","Type":"ContainerDied","Data":"d62fa5e8071e7d6018096931f36d14dbe3004677c3fefe187b206deefd7edb87"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.816358 4888 scope.go:117] "RemoveContainer" containerID="0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.818241 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.822156 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.822437 4888 generic.go:334] "Generic (PLEG): container finished" podID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerID="13f08448097b2ad1ca9941675e877959aa8010a334977be36f6d59d46120c6ea" exitCode=0 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.822534 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerDied","Data":"13f08448097b2ad1ca9941675e877959aa8010a334977be36f6d59d46120c6ea"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.828509 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hqn42" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.828555 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerDied","Data":"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.828496 4888 generic.go:334] "Generic (PLEG): container finished" podID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerID="653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7" exitCode=0 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.828714 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hqn42" event={"ID":"f87ad7cb-a463-4db6-895b-c57bf55140b9","Type":"ContainerDied","Data":"5038007ee77de8ab45777ca047f019fbac8cca5799f9b032ff3b33a672d51c51"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.830638 4888 generic.go:334] "Generic (PLEG): container finished" podID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerID="70d02ffd313add1ebce42c8ad31e9c7cb315e752b2a8c867b4ae09a3af7e4bb3" exitCode=0 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.830687 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" event={"ID":"1a9245f3-0247-4dd6-b4c8-0658f524bc1c","Type":"ContainerDied","Data":"70d02ffd313add1ebce42c8ad31e9c7cb315e752b2a8c867b4ae09a3af7e4bb3"} Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.830758 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.840171 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.844278 4888 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845215 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845237 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845253 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845261 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845273 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845307 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845322 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845331 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845408 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845416 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845433 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845441 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845456 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845462 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845471 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845481 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845494 4888 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerName="marketplace-operator" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845502 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerName="marketplace-operator" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845512 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845519 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845528 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845536 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="extract-content" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845545 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845551 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.845560 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845569 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="extract-utilities" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845708 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845719 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845738 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" containerName="marketplace-operator" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845748 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.845756 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" containerName="registry-server" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846315 4888 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846348 4888 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846493 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846503 4888 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846517 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846524 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846535 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846542 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846548 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846554 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846745 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846760 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846767 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846786 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846791 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.846808 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846814 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846886 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10" gracePeriod=15 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846978 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846988 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-insecure-readyz" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.846996 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.847007 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.847016 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.847022 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.848438 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9" gracePeriod=15 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.848527 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63" gracePeriod=15 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.848583 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84" gracePeriod=15 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.848600 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3" gracePeriod=15 Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.862265 4888 scope.go:117] "RemoveContainer" containerID="b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874491 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content\") pod \"f87ad7cb-a463-4db6-895b-c57bf55140b9\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874549 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksz8f\" (UniqueName: \"kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f\") pod \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874589 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities\") pod \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874632 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content\") pod \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\" (UID: \"12306c7a-35ef-4c7c-9d19-dc9463a224d0\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874656 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities\") pod \"f87ad7cb-a463-4db6-895b-c57bf55140b9\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.874736 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27tpw\" (UniqueName: \"kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw\") pod \"f87ad7cb-a463-4db6-895b-c57bf55140b9\" (UID: \"f87ad7cb-a463-4db6-895b-c57bf55140b9\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.876080 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities" (OuterVolumeSpecName: "utilities") pod "f87ad7cb-a463-4db6-895b-c57bf55140b9" (UID: "f87ad7cb-a463-4db6-895b-c57bf55140b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.876333 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities" (OuterVolumeSpecName: "utilities") pod "12306c7a-35ef-4c7c-9d19-dc9463a224d0" (UID: "12306c7a-35ef-4c7c-9d19-dc9463a224d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.893694 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f" (OuterVolumeSpecName: "kube-api-access-ksz8f") pod "12306c7a-35ef-4c7c-9d19-dc9463a224d0" (UID: "12306c7a-35ef-4c7c-9d19-dc9463a224d0"). InnerVolumeSpecName "kube-api-access-ksz8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.894132 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.894258 4888 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.163:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.909011 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw" (OuterVolumeSpecName: "kube-api-access-27tpw") pod "f87ad7cb-a463-4db6-895b-c57bf55140b9" (UID: "f87ad7cb-a463-4db6-895b-c57bf55140b9"). InnerVolumeSpecName "kube-api-access-27tpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.914743 4888 scope.go:117] "RemoveContainer" containerID="d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.949050 4888 scope.go:117] "RemoveContainer" containerID="0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.949731 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af\": container with ID starting with 0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af not found: ID does not exist" containerID="0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.949765 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af"} err="failed to get container status \"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af\": rpc error: code = NotFound desc = could not find container \"0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af\": container with ID starting with 0f6358e9d5eaa62c63d90d347a79a208bfb33e8350dd55583272e4cb86c354af not found: ID does not exist" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.949791 4888 scope.go:117] "RemoveContainer" containerID="b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.950401 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83\": container with ID starting with b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83 not found: ID does not exist" containerID="b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.950430 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83"} err="failed to get 
container status \"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83\": rpc error: code = NotFound desc = could not find container \"b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83\": container with ID starting with b825771fd6f6081d05d8c8c55ec3a7c4c38f0ea7300ef357ba0431897581ef83 not found: ID does not exist" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.950624 4888 scope.go:117] "RemoveContainer" containerID="d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b" Dec 01 19:37:08 crc kubenswrapper[4888]: E1201 19:37:08.952252 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b\": container with ID starting with d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b not found: ID does not exist" containerID="d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.952282 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b"} err="failed to get container status \"d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b\": rpc error: code = NotFound desc = could not find container \"d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b\": container with ID starting with d2bdaa6e4d22d0f5c1436a1c54a987133f7c93ea251c6d999d4e530eeaca3a9b not found: ID does not exist" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.952301 4888 scope.go:117] "RemoveContainer" containerID="653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.959003 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f87ad7cb-a463-4db6-895b-c57bf55140b9" (UID: "f87ad7cb-a463-4db6-895b-c57bf55140b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.961603 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12306c7a-35ef-4c7c-9d19-dc9463a224d0" (UID: "12306c7a-35ef-4c7c-9d19-dc9463a224d0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975684 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5fd5\" (UniqueName: \"kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5\") pod \"25e7abac-5f63-4c75-91d9-e801ccf75389\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975744 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities\") pod \"25e7abac-5f63-4c75-91d9-e801ccf75389\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975773 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities\") pod \"f1785038-0d12-43ae-9f01-774cabc5ef89\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975801 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh8vp\" (UniqueName: \"kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp\") pod \"f1785038-0d12-43ae-9f01-774cabc5ef89\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975824 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content\") pod \"f1785038-0d12-43ae-9f01-774cabc5ef89\" (UID: \"f1785038-0d12-43ae-9f01-774cabc5ef89\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975855 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mssl\" (UniqueName: \"kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl\") pod \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975894 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content\") pod \"25e7abac-5f63-4c75-91d9-e801ccf75389\" (UID: \"25e7abac-5f63-4c75-91d9-e801ccf75389\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975941 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") pod \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.975971 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") pod \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\" (UID: \"1a9245f3-0247-4dd6-b4c8-0658f524bc1c\") " Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976079 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976124 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976167 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976213 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976245 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976293 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976312 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976335 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976379 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976391 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksz8f\" (UniqueName: 
\"kubernetes.io/projected/12306c7a-35ef-4c7c-9d19-dc9463a224d0-kube-api-access-ksz8f\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976401 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976411 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12306c7a-35ef-4c7c-9d19-dc9463a224d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976420 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f87ad7cb-a463-4db6-895b-c57bf55140b9-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.976429 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27tpw\" (UniqueName: \"kubernetes.io/projected/f87ad7cb-a463-4db6-895b-c57bf55140b9-kube-api-access-27tpw\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.977829 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities" (OuterVolumeSpecName: "utilities") pod "25e7abac-5f63-4c75-91d9-e801ccf75389" (UID: "25e7abac-5f63-4c75-91d9-e801ccf75389"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.979525 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities" (OuterVolumeSpecName: "utilities") pod "f1785038-0d12-43ae-9f01-774cabc5ef89" (UID: "f1785038-0d12-43ae-9f01-774cabc5ef89"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.980315 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "1a9245f3-0247-4dd6-b4c8-0658f524bc1c" (UID: "1a9245f3-0247-4dd6-b4c8-0658f524bc1c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.982021 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl" (OuterVolumeSpecName: "kube-api-access-4mssl") pod "1a9245f3-0247-4dd6-b4c8-0658f524bc1c" (UID: "1a9245f3-0247-4dd6-b4c8-0658f524bc1c"). InnerVolumeSpecName "kube-api-access-4mssl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.982527 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5" (OuterVolumeSpecName: "kube-api-access-l5fd5") pod "25e7abac-5f63-4c75-91d9-e801ccf75389" (UID: "25e7abac-5f63-4c75-91d9-e801ccf75389"). InnerVolumeSpecName "kube-api-access-l5fd5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.983766 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp" (OuterVolumeSpecName: "kube-api-access-wh8vp") pod "f1785038-0d12-43ae-9f01-774cabc5ef89" (UID: "f1785038-0d12-43ae-9f01-774cabc5ef89"). InnerVolumeSpecName "kube-api-access-wh8vp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:08 crc kubenswrapper[4888]: I1201 19:37:08.985773 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "1a9245f3-0247-4dd6-b4c8-0658f524bc1c" (UID: "1a9245f3-0247-4dd6-b4c8-0658f524bc1c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.002417 4888 scope.go:117] "RemoveContainer" containerID="92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.002805 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1785038-0d12-43ae-9f01-774cabc5ef89" (UID: "f1785038-0d12-43ae-9f01-774cabc5ef89"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.028664 4888 scope.go:117] "RemoveContainer" containerID="05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.059529 4888 scope.go:117] "RemoveContainer" containerID="653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.059997 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7\": container with ID starting with 653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7 not found: ID does not exist" containerID="653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060032 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7"} err="failed to get container status \"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7\": rpc error: code = NotFound desc = could not find container \"653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7\": container with ID starting with 653e6319c08b8a484436d7c85734a389e82b96040c54a43b99bbd4903565bad7 not found: ID does not exist" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060061 4888 scope.go:117] "RemoveContainer" containerID="92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.060437 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c\": container with ID starting with 
92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c not found: ID does not exist" containerID="92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060489 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c"} err="failed to get container status \"92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c\": rpc error: code = NotFound desc = could not find container \"92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c\": container with ID starting with 92da595c4449c1e7ed94612eef0cf2b13da9050be8729b07cac43d1d3ccf911c not found: ID does not exist" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060524 4888 scope.go:117] "RemoveContainer" containerID="05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.060775 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1\": container with ID starting with 05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1 not found: ID does not exist" containerID="05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060798 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1"} err="failed to get container status \"05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1\": rpc error: code = NotFound desc = could not find container \"05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1\": container with ID starting with 05649b7abf8d5568e44c7e2cbc3be781953ad6e6ba6ed4022184507766ec30c1 not found: ID does not exist" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.060812 4888 scope.go:117] "RemoveContainer" containerID="70d02ffd313add1ebce42c8ad31e9c7cb315e752b2a8c867b4ae09a3af7e4bb3" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077734 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077805 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077834 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077868 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077889 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077930 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077947 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.077971 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078012 4888 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078027 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5fd5\" (UniqueName: \"kubernetes.io/projected/25e7abac-5f63-4c75-91d9-e801ccf75389-kube-api-access-l5fd5\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078039 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078050 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078063 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh8vp\" (UniqueName: \"kubernetes.io/projected/f1785038-0d12-43ae-9f01-774cabc5ef89-kube-api-access-wh8vp\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078074 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1785038-0d12-43ae-9f01-774cabc5ef89-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078084 4888 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-4mssl\" (UniqueName: \"kubernetes.io/projected/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-kube-api-access-4mssl\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078096 4888 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1a9245f3-0247-4dd6-b4c8-0658f524bc1c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078143 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078260 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078291 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078315 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078340 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078363 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078396 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.078426 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 
19:37:09.131647 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25e7abac-5f63-4c75-91d9-e801ccf75389" (UID: "25e7abac-5f63-4c75-91d9-e801ccf75389"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.150140 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.151064 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.153026 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.153573 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.154208 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.177341 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.177862 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.178401 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 
19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.178746 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.179090 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e7abac-5f63-4c75-91d9-e801ccf75389-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.195259 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.221153 4888 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.163:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d2e90faf61026 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 19:37:09.219700774 +0000 UTC m=+229.090730698,LastTimestamp:2025-12-01 19:37:09.219700774 +0000 UTC m=+229.090730698,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.263981 4888 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 01 19:37:09 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a" Netns:"/var/run/netns/01e50928-9abf-435f-8be4-f2f0b70a323e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update 
the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:09 crc kubenswrapper[4888]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:09 crc kubenswrapper[4888]: > Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.264071 4888 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 01 19:37:09 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a" Netns:"/var/run/netns/01e50928-9abf-435f-8be4-f2f0b70a323e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:09 crc kubenswrapper[4888]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:09 crc kubenswrapper[4888]: > pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.264102 4888 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 01 19:37:09 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network 
"multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a" Netns:"/var/run/netns/01e50928-9abf-435f-8be4-f2f0b70a323e" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:09 crc kubenswrapper[4888]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:09 crc kubenswrapper[4888]: > pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.264210 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a\\\" Netns:\\\"/var/run/netns/01e50928-9abf-435f-8be4-f2f0b70a323e\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=aa60be255c846fd04e5670a89d688027087e4311d2f6866ba7c785c52d8adb6a;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod 
marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s\\\": dial tcp 38.102.83.163:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podUID="08dcfb84-e006-4100-8a3a-26dc77a68e61" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.848248 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pspwm" event={"ID":"25e7abac-5f63-4c75-91d9-e801ccf75389","Type":"ContainerDied","Data":"b7e6e7451d649325bd175cb69d4d3d2367d4cf55eda61421da16ba886ddd656d"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.848727 4888 scope.go:117] "RemoveContainer" containerID="13f08448097b2ad1ca9941675e877959aa8010a334977be36f6d59d46120c6ea" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.849384 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pspwm" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.850428 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.850644 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.850855 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.851098 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.851450 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: 
connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.852903 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" event={"ID":"1a9245f3-0247-4dd6-b4c8-0658f524bc1c","Type":"ContainerDied","Data":"a0347957bdb9c303d1e0885a366aaa5c7c621c860ecbb1b8eacd53b0fd2da241"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.855530 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf8p" event={"ID":"f1785038-0d12-43ae-9f01-774cabc5ef89","Type":"ContainerDied","Data":"1d08840f9510c9072635ada54bdcf182f120078e22efe999159171436ba2fd85"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.855631 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf8p" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.856150 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.858007 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.859798 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.862291 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9" exitCode=0 Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.862322 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3" exitCode=0 Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.862330 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63" exitCode=0 Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.862337 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84" exitCode=2 Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.863541 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.864135 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: 
connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.864620 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.865179 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.865806 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.866611 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.866677 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"387e86d43628ec9b7d83f03e75f764c7495c11bf52fbb3dbd70d7f2a58bce712"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.866740 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f466049bb0304a9346e1b1ce9c25bc614822b220fe3756b11aa53896e64329e3"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.866975 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.867318 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.867728 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.868098 4888 
status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.868845 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869150 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869414 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869476 4888 generic.go:334] "Generic (PLEG): container finished" podID="f3df77ab-d231-4b67-8813-f1afa968973d" containerID="4bb20c102a8ef1c5574beee6ba4ffda401c07b2eb1091bdff4cf520e73373521" exitCode=0 Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869530 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f3df77ab-d231-4b67-8813-f1afa968973d","Type":"ContainerDied","Data":"4bb20c102a8ef1c5574beee6ba4ffda401c07b2eb1091bdff4cf520e73373521"} Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869601 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869686 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.869939 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.870145 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:09 crc kubenswrapper[4888]: E1201 19:37:09.870232 4888 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.163:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.870340 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.870779 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.871428 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.871792 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.872037 4888 scope.go:117] "RemoveContainer" containerID="783674afee97de4c406e99a603a573754d7fa9fbe208660490926a28b08c1134" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.872121 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.872492 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.872828 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.873285 4888 status_manager.go:851] "Failed to get status for pod" 
podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.873729 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.913903 4888 scope.go:117] "RemoveContainer" containerID="740f66fe651111ae05f94f588c2b7e4ffd64444b8e256db3dd08a6a8047561a8" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.957355 4888 scope.go:117] "RemoveContainer" containerID="aad7c55e6a61ec3e5533234bd3a580bba65c0081073e50854948479f22174ec5" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.972387 4888 scope.go:117] "RemoveContainer" containerID="136af63f3658007e2aff14d42428bb73aeb93c79c28b715d8e06f48a6d3e0b08" Dec 01 19:37:09 crc kubenswrapper[4888]: I1201 19:37:09.987908 4888 scope.go:117] "RemoveContainer" containerID="7a3499e4296f1d02ce3a6a3911943e6de82eec50190f1a6d76976ca21d36d903" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.011968 4888 scope.go:117] "RemoveContainer" containerID="cb4a68fe439de21c96a12110ed3642397e638e5b6ffea581c2d6bd14177a6976" Dec 01 19:37:10 crc kubenswrapper[4888]: E1201 19:37:10.298601 4888 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 01 19:37:10 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5" Netns:"/var/run/netns/6dfc4cd2-9558-4d15-adc0-6da008f8633c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:10 crc kubenswrapper[4888]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:10 crc kubenswrapper[4888]: > Dec 01 19:37:10 crc kubenswrapper[4888]: E1201 19:37:10.298664 4888 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 01 19:37:10 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5" Netns:"/var/run/netns/6dfc4cd2-9558-4d15-adc0-6da008f8633c" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:10 crc kubenswrapper[4888]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:10 crc kubenswrapper[4888]: > pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:10 crc kubenswrapper[4888]: E1201 19:37:10.298685 4888 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 01 19:37:10 crc kubenswrapper[4888]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5" Netns:"/var/run/netns/6dfc4cd2-9558-4d15-adc0-6da008f8633c" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s": dial tcp 38.102.83.163:6443: connect: connection refused Dec 01 19:37:10 crc kubenswrapper[4888]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 01 19:37:10 crc kubenswrapper[4888]: > pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:10 crc kubenswrapper[4888]: E1201 19:37:10.298741 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-w5r85_openshift-marketplace_08dcfb84-e006-4100-8a3a-26dc77a68e61_0(984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5): error adding pod openshift-marketplace_marketplace-operator-79b997595-w5r85 to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5\\\" Netns:\\\"/var/run/netns/6dfc4cd2-9558-4d15-adc0-6da008f8633c\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-w5r85;K8S_POD_INFRA_CONTAINER_ID=984f98303e2f7d2c450dd445f049117fae5ef547f3d8c3b2052be82abf0921d5;K8S_POD_UID=08dcfb84-e006-4100-8a3a-26dc77a68e61\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-w5r85] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-w5r85/08dcfb84-e006-4100-8a3a-26dc77a68e61]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-w5r85 in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-w5r85?timeout=1m0s\\\": dial tcp 38.102.83.163:6443: connect: connection refused\\n': 
StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podUID="08dcfb84-e006-4100-8a3a-26dc77a68e61" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.454348 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.454865 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.455199 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.455502 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.455721 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.455910 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.456118 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:10 crc kubenswrapper[4888]: I1201 19:37:10.879012 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.219540 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.220018 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.220288 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.220500 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.220811 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.221197 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.221403 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.224415 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.225163 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.225606 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.226040 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.226255 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.226453 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.226695 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.226925 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.227176 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408714 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock\") pod \"f3df77ab-d231-4b67-8813-f1afa968973d\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408782 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 19:37:11 crc 
kubenswrapper[4888]: I1201 19:37:11.408819 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir\") pod \"f3df77ab-d231-4b67-8813-f1afa968973d\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408848 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408864 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock" (OuterVolumeSpecName: "var-lock") pod "f3df77ab-d231-4b67-8813-f1afa968973d" (UID: "f3df77ab-d231-4b67-8813-f1afa968973d"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408865 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408908 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f3df77ab-d231-4b67-8813-f1afa968973d" (UID: "f3df77ab-d231-4b67-8813-f1afa968973d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408914 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access\") pod \"f3df77ab-d231-4b67-8813-f1afa968973d\" (UID: \"f3df77ab-d231-4b67-8813-f1afa968973d\") " Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408922 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.408959 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409076 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409277 4888 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409575 4888 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409586 4888 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409596 4888 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f3df77ab-d231-4b67-8813-f1afa968973d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.409605 4888 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.419043 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f3df77ab-d231-4b67-8813-f1afa968973d" (UID: "f3df77ab-d231-4b67-8813-f1afa968973d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.547332 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f3df77ab-d231-4b67-8813-f1afa968973d-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.894324 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.895108 4888 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10" exitCode=0 Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.895172 4888 scope.go:117] "RemoveContainer" containerID="e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.895319 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.900111 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"f3df77ab-d231-4b67-8813-f1afa968973d","Type":"ContainerDied","Data":"40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21"} Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.900147 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40a2aaca4621efbc486d4ccf9de17069bbdb8f01b294ff79596e3a40a9355a21" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.900209 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.912820 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.913051 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.913441 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.913510 4888 scope.go:117] "RemoveContainer" containerID="95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.913752 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.914052 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.914415 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.914764 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" 
pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.922686 4888 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.923009 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.923250 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.923473 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.923742 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.923966 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.924205 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.929126 4888 scope.go:117] "RemoveContainer" containerID="4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.945154 4888 scope.go:117] "RemoveContainer" containerID="4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84" Dec 01 19:37:11 crc kubenswrapper[4888]: I1201 19:37:11.960994 4888 scope.go:117] "RemoveContainer" containerID="3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10" Dec 01 19:37:11 crc 
kubenswrapper[4888]: I1201 19:37:11.981704 4888 scope.go:117] "RemoveContainer" containerID="df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.001860 4888 scope.go:117] "RemoveContainer" containerID="e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.003160 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\": container with ID starting with e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9 not found: ID does not exist" containerID="e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.003223 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9"} err="failed to get container status \"e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\": rpc error: code = NotFound desc = could not find container \"e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9\": container with ID starting with e94b80f43a2033aa156005dac0bb551bbbc7a0334b071cdc62c03af4ed19a0d9 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.003252 4888 scope.go:117] "RemoveContainer" containerID="95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.004730 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\": container with ID starting with 95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3 not found: ID does not exist" containerID="95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.004772 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3"} err="failed to get container status \"95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\": rpc error: code = NotFound desc = could not find container \"95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3\": container with ID starting with 95da8728ba70af3fdd0b43d1169eae2ae640328ab4bb8645f88237e2b32881a3 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.004804 4888 scope.go:117] "RemoveContainer" containerID="4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.005146 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\": container with ID starting with 4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63 not found: ID does not exist" containerID="4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.005176 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63"} err="failed to get container status 
\"4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\": rpc error: code = NotFound desc = could not find container \"4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63\": container with ID starting with 4a2f09754ce53e53a0594bccaf4990aa3e77ec49240858f5339f36eb1aed3d63 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.005206 4888 scope.go:117] "RemoveContainer" containerID="4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.005660 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\": container with ID starting with 4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84 not found: ID does not exist" containerID="4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.005698 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84"} err="failed to get container status \"4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\": rpc error: code = NotFound desc = could not find container \"4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84\": container with ID starting with 4ed11ef1b9115f6a17a6a27a1084a266af074287fbc020bd458c1fcb4bc24e84 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.005729 4888 scope.go:117] "RemoveContainer" containerID="3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.006094 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\": container with ID starting with 3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10 not found: ID does not exist" containerID="3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.006121 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10"} err="failed to get container status \"3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\": rpc error: code = NotFound desc = could not find container \"3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10\": container with ID starting with 3c81dd8f85556233c6c15dd16b8618bc78c2c2907e2a48ba00431121cfba0e10 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.006217 4888 scope.go:117] "RemoveContainer" containerID="df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.006477 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\": container with ID starting with df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245 not found: ID does not exist" containerID="df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.006499 4888 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245"} err="failed to get container status \"df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\": rpc error: code = NotFound desc = could not find container \"df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245\": container with ID starting with df5e2f0a435b40ad41113f8f3698aa9339dce6f672d8522b812beb0a5dcf8245 not found: ID does not exist" Dec 01 19:37:12 crc kubenswrapper[4888]: I1201 19:37:12.458734 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 01 19:37:12 crc kubenswrapper[4888]: E1201 19:37:12.634417 4888 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.163:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187d2e90faf61026 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 19:37:09.219700774 +0000 UTC m=+229.090730698,LastTimestamp:2025-12-01 19:37:09.219700774 +0000 UTC m=+229.090730698,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.949894 4888 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.950397 4888 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.950902 4888 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.951469 4888 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.951957 4888 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:15 crc kubenswrapper[4888]: I1201 19:37:15.952007 4888 controller.go:115] "failed to update lease using latest lease, 
fallback to ensure lease" err="failed 5 attempts to update lease" Dec 01 19:37:15 crc kubenswrapper[4888]: E1201 19:37:15.952517 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="200ms" Dec 01 19:37:16 crc kubenswrapper[4888]: E1201 19:37:16.153504 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="400ms" Dec 01 19:37:16 crc kubenswrapper[4888]: E1201 19:37:16.554770 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="800ms" Dec 01 19:37:17 crc kubenswrapper[4888]: E1201 19:37:17.356524 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="1.6s" Dec 01 19:37:18 crc kubenswrapper[4888]: E1201 19:37:18.957531 4888 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.163:6443: connect: connection refused" interval="3.2s" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.450860 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.468345 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.470228 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.470493 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.470766 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.471374 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.472734 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.473147 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.473605 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.473877 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.474318 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.474962 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.475500 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.483644 4888 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.483672 4888 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:20 crc kubenswrapper[4888]: E1201 19:37:20.484089 4888 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.484645 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.953399 4888 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="051dfce181b2b0c3f44308adb78c0e0640e5a5c1bf5c6b5d5558d7b60819df9b" exitCode=0 Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.953506 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"051dfce181b2b0c3f44308adb78c0e0640e5a5c1bf5c6b5d5558d7b60819df9b"} Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.953959 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"96003874bac7a9f0d3c14672da2f8b8eab1730001292ab24d401e672f03796a4"} Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.954410 4888 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.954441 4888 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:20 crc kubenswrapper[4888]: E1201 19:37:20.955129 4888 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.955292 4888 status_manager.go:851] "Failed to get status for pod" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" pod="openshift-marketplace/certified-operators-665fr" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-665fr\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.955899 4888 status_manager.go:851] "Failed to get status for pod" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" pod="openshift-marketplace/redhat-marketplace-ppf8p" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ppf8p\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.956268 4888 status_manager.go:851] "Failed to get status for pod" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.956749 4888 status_manager.go:851] "Failed to get status for pod" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" pod="openshift-marketplace/redhat-operators-pspwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pspwm\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.957066 4888 status_manager.go:851] "Failed to get status for pod" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" pod="openshift-marketplace/community-operators-hqn42" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hqn42\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:20 crc kubenswrapper[4888]: I1201 19:37:20.957399 4888 status_manager.go:851] "Failed to get status for pod" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" pod="openshift-marketplace/marketplace-operator-79b997595-t425v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-t425v\": dial tcp 38.102.83.163:6443: connect: connection refused" Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.974107 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"61e4814758e880f9d87433abe7ca7429c7939e8372032be28858cab512813167"} Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.974549 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"12b9c9d2c8764ae4e4e0bc3b195746a5923fdcf0d9e60abadbf888cd9580e6ad"} Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.974568 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1b56e0352d9606ea18634b48f24cba12c85a90a0f15a0b903e7e82c001aaf884"} Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.974603 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f7fea65344e6c9268110c61bdbc76155ab56c132a5aa101a68e53f50878985f2"} Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.978546 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.978584 4888 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346" exitCode=1 Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.978611 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346"} Dec 01 19:37:21 crc kubenswrapper[4888]: I1201 19:37:21.979091 4888 scope.go:117] "RemoveContainer" containerID="b93a8b1dfa8b7cc47cbc7554396982c8d48080d99c9415a035c2c89f421bb346" Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.743164 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.985129 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.985238 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c084c158852eeb7bb8a71391f45dc422f830ee5c729bfd43bb398c541ae4a9b9"} Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.988716 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"965b8a2fac4563f5b6936496ed379586fb33cd52d2dad180010fde35170fa33a"} Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.988914 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.989086 4888 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:22 crc kubenswrapper[4888]: I1201 19:37:22.989125 4888 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:23 crc kubenswrapper[4888]: I1201 19:37:23.426515 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:37:23 crc kubenswrapper[4888]: I1201 19:37:23.450209 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:23 crc kubenswrapper[4888]: I1201 19:37:23.451023 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:25 crc kubenswrapper[4888]: I1201 19:37:25.485215 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:25 crc kubenswrapper[4888]: I1201 19:37:25.485494 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:25 crc kubenswrapper[4888]: I1201 19:37:25.490480 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:28 crc kubenswrapper[4888]: I1201 19:37:28.001741 4888 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:28 crc kubenswrapper[4888]: I1201 19:37:28.897392 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:37:28 crc kubenswrapper[4888]: I1201 19:37:28.903103 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:37:29 crc kubenswrapper[4888]: I1201 19:37:29.022168 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" event={"ID":"08dcfb84-e006-4100-8a3a-26dc77a68e61","Type":"ContainerStarted","Data":"6d77fd92e890b5830444d8ae7d4e8f0a015a0307434c7f1087739645eb61e70d"} Dec 01 19:37:29 crc kubenswrapper[4888]: I1201 19:37:29.022486 4888 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:29 crc kubenswrapper[4888]: I1201 19:37:29.022629 4888 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:29 crc kubenswrapper[4888]: I1201 19:37:29.028910 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.031410 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/0.log" Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.031487 4888 generic.go:334] "Generic (PLEG): container finished" podID="08dcfb84-e006-4100-8a3a-26dc77a68e61" containerID="18199f9d52df16aeba64c14e23504943d9afca72eecf9f3c6afcf6efb060cb64" exitCode=1 Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.031603 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" event={"ID":"08dcfb84-e006-4100-8a3a-26dc77a68e61","Type":"ContainerDied","Data":"18199f9d52df16aeba64c14e23504943d9afca72eecf9f3c6afcf6efb060cb64"} Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.032018 4888 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.032038 4888 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7a8b2891-c633-4161-89d6-12f4270339ae" Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.032733 4888 scope.go:117] "RemoveContainer" containerID="18199f9d52df16aeba64c14e23504943d9afca72eecf9f3c6afcf6efb060cb64" Dec 01 19:37:30 crc kubenswrapper[4888]: I1201 19:37:30.473993 4888 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fffce7b8-5298-4c0c-9486-f866670a1cc8" Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043301 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/1.log" Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043620 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/0.log" Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043658 4888 generic.go:334] "Generic (PLEG): container finished" podID="08dcfb84-e006-4100-8a3a-26dc77a68e61" containerID="719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158" exitCode=1 Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043685 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" event={"ID":"08dcfb84-e006-4100-8a3a-26dc77a68e61","Type":"ContainerDied","Data":"719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158"} Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043718 4888 scope.go:117] "RemoveContainer" containerID="18199f9d52df16aeba64c14e23504943d9afca72eecf9f3c6afcf6efb060cb64" Dec 01 19:37:31 crc kubenswrapper[4888]: I1201 19:37:31.043991 4888 scope.go:117] "RemoveContainer" containerID="719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158" Dec 01 19:37:31 crc kubenswrapper[4888]: E1201 19:37:31.045223 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\"" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podUID="08dcfb84-e006-4100-8a3a-26dc77a68e61" Dec 01 19:37:32 crc kubenswrapper[4888]: I1201 19:37:32.053732 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/1.log" Dec 01 19:37:32 crc kubenswrapper[4888]: I1201 19:37:32.054256 4888 scope.go:117] "RemoveContainer" containerID="719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158" Dec 01 19:37:32 crc kubenswrapper[4888]: E1201 19:37:32.054519 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\"" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podUID="08dcfb84-e006-4100-8a3a-26dc77a68e61" Dec 01 19:37:33 crc kubenswrapper[4888]: I1201 19:37:33.429910 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 19:37:34 crc kubenswrapper[4888]: I1201 19:37:34.325223 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 19:37:34 crc kubenswrapper[4888]: I1201 19:37:34.402407 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 19:37:35 crc kubenswrapper[4888]: I1201 19:37:35.244234 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 19:37:35 crc kubenswrapper[4888]: I1201 19:37:35.330143 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.035628 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.172360 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.449761 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.598076 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.598128 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.598739 4888 scope.go:117] "RemoveContainer" containerID="719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158" Dec 01 19:37:38 crc kubenswrapper[4888]: E1201 19:37:38.599016 4888 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-w5r85_openshift-marketplace(08dcfb84-e006-4100-8a3a-26dc77a68e61)\"" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podUID="08dcfb84-e006-4100-8a3a-26dc77a68e61" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.771537 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 19:37:38 crc kubenswrapper[4888]: I1201 19:37:38.991077 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 19:37:39 crc kubenswrapper[4888]: I1201 19:37:39.429096 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 19:37:39 crc kubenswrapper[4888]: I1201 19:37:39.512943 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 19:37:39 crc kubenswrapper[4888]: I1201 19:37:39.870011 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 19:37:39 crc kubenswrapper[4888]: I1201 19:37:39.908247 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.203932 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.293689 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.345766 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.570285 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.663625 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 19:37:40 crc kubenswrapper[4888]: I1201 19:37:40.900864 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.029508 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.068564 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.324411 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.352488 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.412787 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 19:37:41 crc 
kubenswrapper[4888]: I1201 19:37:41.536559 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.660057 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.782413 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.870492 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 19:37:41 crc kubenswrapper[4888]: I1201 19:37:41.879480 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.024114 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.026833 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.244732 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.291936 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.367712 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.389875 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.424803 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.601042 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.861275 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 19:37:42 crc kubenswrapper[4888]: I1201 19:37:42.921794 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.165400 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.205064 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.303951 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.410531 4888 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.411699 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.469273 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.484843 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.493958 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.501066 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.646276 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.716918 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.762260 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.804984 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.817642 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.857638 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.915265 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.975156 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 19:37:43 crc kubenswrapper[4888]: I1201 19:37:43.993140 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.000060 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.090076 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.100038 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.243520 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.271227 4888 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.310826 4888 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.348490 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.395784 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.399402 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.518739 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.598837 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.615979 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.687554 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.689651 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.704070 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.942643 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.975342 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.985031 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 19:37:44 crc kubenswrapper[4888]: I1201 19:37:44.990681 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.179315 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.222856 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.240645 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.297392 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.324906 4888 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"metrics-tls" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.379567 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.405126 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.444483 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.445822 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.524028 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.644256 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.647565 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.707719 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.723913 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.817787 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.922542 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 19:37:45 crc kubenswrapper[4888]: I1201 19:37:45.945622 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.180617 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.245505 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.265543 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.328362 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.358704 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.408855 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.436572 4888 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.503880 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.514948 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.599936 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.618008 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.619747 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.691099 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.726701 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.742153 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.766224 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.903049 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 19:37:46 crc kubenswrapper[4888]: I1201 19:37:46.919326 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.019699 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.031333 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.031428 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.083941 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.125574 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.197534 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.222318 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.264887 4888 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.278635 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.433389 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.564095 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.676502 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.730541 4888 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.732260 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 19:37:47 crc kubenswrapper[4888]: I1201 19:37:47.911894 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.047344 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.095605 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.197598 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.405850 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.509066 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.535564 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.671234 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.695077 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.719650 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.744831 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.751273 4888 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 19:37:48 crc 
kubenswrapper[4888]: I1201 19:37:48.886854 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.900894 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.914828 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 19:37:48 crc kubenswrapper[4888]: I1201 19:37:48.950177 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.091171 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.102612 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.107940 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.169290 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.256028 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.276020 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.313899 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.332057 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.451656 4888 scope.go:117] "RemoveContainer" containerID="719883d6c5678a3b833df6b47ed876dba2604699548add9aa800bc5d1ddd5158" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.453568 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.483843 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.497092 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.605513 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.754879 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.945124 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 19:37:49 crc 
kubenswrapper[4888]: I1201 19:37:49.985716 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 19:37:49 crc kubenswrapper[4888]: I1201 19:37:49.989283 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.064530 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.120140 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.141420 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/1.log" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.141474 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" event={"ID":"08dcfb84-e006-4100-8a3a-26dc77a68e61","Type":"ContainerStarted","Data":"0e156929b90d4d53a25f0722983ba589a7d891085ed92a10276c3eec0583232f"} Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.141847 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.145262 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.171268 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.197951 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.198136 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.199636 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.210280 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.359788 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.360737 4888 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.361336 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-w5r85" podStartSLOduration=42.361322093 podStartE2EDuration="42.361322093s" podCreationTimestamp="2025-12-01 19:37:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:37:50.158431044 +0000 UTC m=+270.029460958" watchObservedRunningTime="2025-12-01 19:37:50.361322093 +0000 UTC m=+270.232352007" Dec 01 19:37:50 crc 
kubenswrapper[4888]: I1201 19:37:50.365419 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf8p","openshift-marketplace/community-operators-hqn42","openshift-marketplace/redhat-operators-pspwm","openshift-marketplace/marketplace-operator-79b997595-t425v","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-665fr"] Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.365508 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.365532 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w5r85"] Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.368862 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.381897 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.381869786 podStartE2EDuration="22.381869786s" podCreationTimestamp="2025-12-01 19:37:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:37:50.37959308 +0000 UTC m=+270.250622994" watchObservedRunningTime="2025-12-01 19:37:50.381869786 +0000 UTC m=+270.252899700" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.438630 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.452969 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.458365 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12306c7a-35ef-4c7c-9d19-dc9463a224d0" path="/var/lib/kubelet/pods/12306c7a-35ef-4c7c-9d19-dc9463a224d0/volumes" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.459053 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a9245f3-0247-4dd6-b4c8-0658f524bc1c" path="/var/lib/kubelet/pods/1a9245f3-0247-4dd6-b4c8-0658f524bc1c/volumes" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.459509 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e7abac-5f63-4c75-91d9-e801ccf75389" path="/var/lib/kubelet/pods/25e7abac-5f63-4c75-91d9-e801ccf75389/volumes" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.460537 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1785038-0d12-43ae-9f01-774cabc5ef89" path="/var/lib/kubelet/pods/f1785038-0d12-43ae-9f01-774cabc5ef89/volumes" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.461097 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f87ad7cb-a463-4db6-895b-c57bf55140b9" path="/var/lib/kubelet/pods/f87ad7cb-a463-4db6-895b-c57bf55140b9/volumes" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.530067 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.537163 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.690070 4888 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.691962 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.692283 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.692942 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.712866 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.780148 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.799049 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.842006 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.846171 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.939078 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.951409 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.962489 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 19:37:50 crc kubenswrapper[4888]: I1201 19:37:50.967004 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.024502 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.050769 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.057992 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.067002 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.092457 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.130602 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 19:37:51 crc 
kubenswrapper[4888]: I1201 19:37:51.258165 4888 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.360587 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.462282 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.525444 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.549954 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.575050 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.832320 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 19:37:51 crc kubenswrapper[4888]: I1201 19:37:51.874219 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.057337 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.073573 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.176426 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.179479 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.327957 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.382920 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.469687 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.481428 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.548409 4888 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.612932 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.629932 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 
19:37:52.636009 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.676280 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.885068 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 19:37:52 crc kubenswrapper[4888]: I1201 19:37:52.997955 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.034387 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.103539 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.115391 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.137109 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.232243 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.303762 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.589812 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.634373 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.686429 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.725077 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.729825 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 19:37:53 crc kubenswrapper[4888]: I1201 19:37:53.790083 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.116884 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.134738 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.150850 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.195429 4888 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.213377 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.220070 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.239670 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.353520 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.376786 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.571512 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.650616 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.672412 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.723426 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 19:37:54 crc kubenswrapper[4888]: I1201 19:37:54.927576 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.010575 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.214751 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.270448 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.467900 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.612427 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 19:37:55 crc kubenswrapper[4888]: I1201 19:37:55.753506 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 19:37:56 crc kubenswrapper[4888]: I1201 19:37:56.302902 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 19:37:56 crc kubenswrapper[4888]: I1201 19:37:56.651145 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 19:37:56 crc kubenswrapper[4888]: I1201 19:37:56.981115 
4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 19:37:58 crc kubenswrapper[4888]: I1201 19:37:58.368579 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 19:38:01 crc kubenswrapper[4888]: I1201 19:38:01.699891 4888 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 19:38:01 crc kubenswrapper[4888]: I1201 19:38:01.700311 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://387e86d43628ec9b7d83f03e75f764c7495c11bf52fbb3dbd70d7f2a58bce712" gracePeriod=5 Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.239464 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.239778 4888 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="387e86d43628ec9b7d83f03e75f764c7495c11bf52fbb3dbd70d7f2a58bce712" exitCode=137 Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.239839 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f466049bb0304a9346e1b1ce9c25bc614822b220fe3756b11aa53896e64329e3" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.268222 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.268290 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.322899 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.322967 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323013 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323053 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323081 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323404 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323618 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323650 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.323676 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.331044 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.424445 4888 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.424488 4888 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.424503 4888 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.424516 4888 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:07 crc kubenswrapper[4888]: I1201 19:38:07.424526 4888 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:08 crc kubenswrapper[4888]: I1201 19:38:08.244602 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 19:38:08 crc kubenswrapper[4888]: I1201 19:38:08.460062 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 01 19:38:20 crc kubenswrapper[4888]: I1201 19:38:20.314977 4888 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.059454 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.060442 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" podUID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" containerName="controller-manager" containerID="cri-o://3bcf5dd215143992669d730a4d02125bc2c89920d85bd07920e7402ac6bafe0b" gracePeriod=30 Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.154386 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.154897 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" podUID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" containerName="route-controller-manager" containerID="cri-o://7c345aa26cae54dfa74cbc8c0a15899088eb0fa129f391db263f14737c6fb5e4" gracePeriod=30 Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.319115 4888 generic.go:334] "Generic (PLEG): container finished" podID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" containerID="3bcf5dd215143992669d730a4d02125bc2c89920d85bd07920e7402ac6bafe0b" exitCode=0 Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.319205 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" event={"ID":"340ce8b7-3ad7-46fb-a45e-b70641e8661c","Type":"ContainerDied","Data":"3bcf5dd215143992669d730a4d02125bc2c89920d85bd07920e7402ac6bafe0b"} Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.320595 4888 generic.go:334] "Generic (PLEG): container finished" podID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" containerID="7c345aa26cae54dfa74cbc8c0a15899088eb0fa129f391db263f14737c6fb5e4" exitCode=0 Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.320635 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" event={"ID":"aeb4ebff-81e9-4beb-b7fa-a800849d16ff","Type":"ContainerDied","Data":"7c345aa26cae54dfa74cbc8c0a15899088eb0fa129f391db263f14737c6fb5e4"} Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.441589 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.517051 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.545942 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config\") pod \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.546034 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles\") pod \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.546065 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x25vz\" (UniqueName: \"kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz\") pod \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.546090 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca\") pod \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.546124 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert\") pod \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\" (UID: \"340ce8b7-3ad7-46fb-a45e-b70641e8661c\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.547096 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config" (OuterVolumeSpecName: "config") pod "340ce8b7-3ad7-46fb-a45e-b70641e8661c" (UID: "340ce8b7-3ad7-46fb-a45e-b70641e8661c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.547414 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "340ce8b7-3ad7-46fb-a45e-b70641e8661c" (UID: "340ce8b7-3ad7-46fb-a45e-b70641e8661c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.547597 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca" (OuterVolumeSpecName: "client-ca") pod "340ce8b7-3ad7-46fb-a45e-b70641e8661c" (UID: "340ce8b7-3ad7-46fb-a45e-b70641e8661c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.551909 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "340ce8b7-3ad7-46fb-a45e-b70641e8661c" (UID: "340ce8b7-3ad7-46fb-a45e-b70641e8661c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.552461 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz" (OuterVolumeSpecName: "kube-api-access-x25vz") pod "340ce8b7-3ad7-46fb-a45e-b70641e8661c" (UID: "340ce8b7-3ad7-46fb-a45e-b70641e8661c"). InnerVolumeSpecName "kube-api-access-x25vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647367 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca\") pod \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647521 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config\") pod \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647569 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert\") pod \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647622 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvscn\" (UniqueName: \"kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn\") pod \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\" (UID: \"aeb4ebff-81e9-4beb-b7fa-a800849d16ff\") " Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647830 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647845 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647857 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x25vz\" (UniqueName: \"kubernetes.io/projected/340ce8b7-3ad7-46fb-a45e-b70641e8661c-kube-api-access-x25vz\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647868 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/340ce8b7-3ad7-46fb-a45e-b70641e8661c-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.647878 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/340ce8b7-3ad7-46fb-a45e-b70641e8661c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.648345 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca" (OuterVolumeSpecName: "client-ca") pod "aeb4ebff-81e9-4beb-b7fa-a800849d16ff" (UID: 
"aeb4ebff-81e9-4beb-b7fa-a800849d16ff"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.648379 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config" (OuterVolumeSpecName: "config") pod "aeb4ebff-81e9-4beb-b7fa-a800849d16ff" (UID: "aeb4ebff-81e9-4beb-b7fa-a800849d16ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.651334 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn" (OuterVolumeSpecName: "kube-api-access-lvscn") pod "aeb4ebff-81e9-4beb-b7fa-a800849d16ff" (UID: "aeb4ebff-81e9-4beb-b7fa-a800849d16ff"). InnerVolumeSpecName "kube-api-access-lvscn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.651751 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "aeb4ebff-81e9-4beb-b7fa-a800849d16ff" (UID: "aeb4ebff-81e9-4beb-b7fa-a800849d16ff"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.749301 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.749340 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.749370 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvscn\" (UniqueName: \"kubernetes.io/projected/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-kube-api-access-lvscn\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:23 crc kubenswrapper[4888]: I1201 19:38:23.749380 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aeb4ebff-81e9-4beb-b7fa-a800849d16ff-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.327259 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" event={"ID":"340ce8b7-3ad7-46fb-a45e-b70641e8661c","Type":"ContainerDied","Data":"ef6cfe4c993ff4026bf2a04a84b039f901c3071742164896768bdf5579d3c6a1"} Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.327332 4888 scope.go:117] "RemoveContainer" containerID="3bcf5dd215143992669d730a4d02125bc2c89920d85bd07920e7402ac6bafe0b" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.327472 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8tfrf" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.334144 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" event={"ID":"aeb4ebff-81e9-4beb-b7fa-a800849d16ff","Type":"ContainerDied","Data":"6af95cb375bf3f61f170ff5fa1b29f62d24cd60f4ad54149a8c79d5d9e42f1e7"} Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.334200 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.354234 4888 scope.go:117] "RemoveContainer" containerID="7c345aa26cae54dfa74cbc8c0a15899088eb0fa129f391db263f14737c6fb5e4" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.365067 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.375358 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gdgdb"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.392652 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.397076 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8tfrf"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.470065 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" path="/var/lib/kubelet/pods/340ce8b7-3ad7-46fb-a45e-b70641e8661c/volumes" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.471098 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" path="/var/lib/kubelet/pods/aeb4ebff-81e9-4beb-b7fa-a800849d16ff/volumes" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.741665 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:24 crc kubenswrapper[4888]: E1201 19:38:24.742439 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" containerName="route-controller-manager" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742458 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" containerName="route-controller-manager" Dec 01 19:38:24 crc kubenswrapper[4888]: E1201 19:38:24.742469 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" containerName="installer" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742477 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" containerName="installer" Dec 01 19:38:24 crc kubenswrapper[4888]: E1201 19:38:24.742493 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" containerName="controller-manager" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742502 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" containerName="controller-manager" Dec 01 19:38:24 crc 
kubenswrapper[4888]: E1201 19:38:24.742516 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742524 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742638 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb4ebff-81e9-4beb-b7fa-a800849d16ff" containerName="route-controller-manager" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742656 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742668 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="340ce8b7-3ad7-46fb-a45e-b70641e8661c" containerName="controller-manager" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.742680 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3df77ab-d231-4b67-8813-f1afa968973d" containerName="installer" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.743128 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.744823 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.745559 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.745588 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.747338 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.747814 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.748018 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.748133 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.748439 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.748586 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.748700 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.749010 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.749378 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.749761 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.749955 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.754019 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.757414 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.765695 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863628 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863688 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dg7l\" (UniqueName: \"kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: 
\"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863775 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863841 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k76t\" (UniqueName: \"kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863881 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863939 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863957 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.863984 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.864124 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965630 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: 
\"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965721 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965738 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965754 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965779 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965799 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dg7l\" (UniqueName: \"kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965830 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965860 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k76t\" (UniqueName: \"kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.965892 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 
crc kubenswrapper[4888]: I1201 19:38:24.967039 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.967667 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.967729 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.967936 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.969416 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.970721 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.973746 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.983731 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dg7l\" (UniqueName: \"kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l\") pod \"controller-manager-7f47dcb4b4-nxs4l\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:24 crc kubenswrapper[4888]: I1201 19:38:24.983779 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k76t\" (UniqueName: 
\"kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t\") pod \"route-controller-manager-555cb4d5dd-2tgnb\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:25 crc kubenswrapper[4888]: I1201 19:38:25.060945 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:25 crc kubenswrapper[4888]: I1201 19:38:25.071242 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:25 crc kubenswrapper[4888]: I1201 19:38:25.309679 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:25 crc kubenswrapper[4888]: W1201 19:38:25.340681 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff4272eb_f24b_418c_b389_5e9444e1e18e.slice/crio-40b0d2d4df33046d2a292950e6d98d6ee441d2ca95cc1357d4dbf1893f2cd700 WatchSource:0}: Error finding container 40b0d2d4df33046d2a292950e6d98d6ee441d2ca95cc1357d4dbf1893f2cd700: Status 404 returned error can't find the container with id 40b0d2d4df33046d2a292950e6d98d6ee441d2ca95cc1357d4dbf1893f2cd700 Dec 01 19:38:25 crc kubenswrapper[4888]: I1201 19:38:25.408709 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.374731 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" event={"ID":"f19b1a74-e044-4e34-b59a-0aab06417e9f","Type":"ContainerStarted","Data":"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51"} Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.375068 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" event={"ID":"f19b1a74-e044-4e34-b59a-0aab06417e9f","Type":"ContainerStarted","Data":"b37c5893732d57ef30751c724a579e3d6d5752f65b644b031f604e2f94932192"} Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.375092 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.377699 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" event={"ID":"ff4272eb-f24b-418c-b389-5e9444e1e18e","Type":"ContainerStarted","Data":"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949"} Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.377803 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" event={"ID":"ff4272eb-f24b-418c-b389-5e9444e1e18e","Type":"ContainerStarted","Data":"40b0d2d4df33046d2a292950e6d98d6ee441d2ca95cc1357d4dbf1893f2cd700"} Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.377876 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.380236 4888 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.383020 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.393726 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" podStartSLOduration=3.393706413 podStartE2EDuration="3.393706413s" podCreationTimestamp="2025-12-01 19:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:38:26.391132989 +0000 UTC m=+306.262162903" watchObservedRunningTime="2025-12-01 19:38:26.393706413 +0000 UTC m=+306.264736317" Dec 01 19:38:26 crc kubenswrapper[4888]: I1201 19:38:26.407223 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" podStartSLOduration=3.4072075330000002 podStartE2EDuration="3.407207533s" podCreationTimestamp="2025-12-01 19:38:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:38:26.406554004 +0000 UTC m=+306.277583938" watchObservedRunningTime="2025-12-01 19:38:26.407207533 +0000 UTC m=+306.278237447" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.672682 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jp5pm"] Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.673668 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.675850 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.683619 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jp5pm"] Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.725621 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltvnb\" (UniqueName: \"kubernetes.io/projected/a1f98748-d4c4-4e86-93a3-5ebe405250ee-kube-api-access-ltvnb\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.725700 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-utilities\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.725843 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-catalog-content\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.827556 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-utilities\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.827653 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-catalog-content\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.827720 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltvnb\" (UniqueName: \"kubernetes.io/projected/a1f98748-d4c4-4e86-93a3-5ebe405250ee-kube-api-access-ltvnb\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.828132 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-utilities\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.828416 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f98748-d4c4-4e86-93a3-5ebe405250ee-catalog-content\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " 
pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.852145 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltvnb\" (UniqueName: \"kubernetes.io/projected/a1f98748-d4c4-4e86-93a3-5ebe405250ee-kube-api-access-ltvnb\") pod \"redhat-operators-jp5pm\" (UID: \"a1f98748-d4c4-4e86-93a3-5ebe405250ee\") " pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.874464 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gcn8p"] Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.875667 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.878558 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.889387 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcn8p"] Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.928923 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsjml\" (UniqueName: \"kubernetes.io/projected/42ea68bc-ce83-490b-89ab-30ac5124fb9e-kube-api-access-zsjml\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.928991 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-utilities\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:27 crc kubenswrapper[4888]: I1201 19:38:27.929178 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-catalog-content\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.030321 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsjml\" (UniqueName: \"kubernetes.io/projected/42ea68bc-ce83-490b-89ab-30ac5124fb9e-kube-api-access-zsjml\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.030395 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-utilities\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.030437 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-catalog-content\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " 
pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.030955 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-utilities\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.030970 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42ea68bc-ce83-490b-89ab-30ac5124fb9e-catalog-content\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.037213 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.048959 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsjml\" (UniqueName: \"kubernetes.io/projected/42ea68bc-ce83-490b-89ab-30ac5124fb9e-kube-api-access-zsjml\") pod \"redhat-marketplace-gcn8p\" (UID: \"42ea68bc-ce83-490b-89ab-30ac5124fb9e\") " pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.195863 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.245945 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jp5pm"] Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.392204 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jp5pm" event={"ID":"a1f98748-d4c4-4e86-93a3-5ebe405250ee","Type":"ContainerStarted","Data":"5e6a9057242c5424346cc6f9471895ae5c057c9a0bd6ca7a39bb0faaea706adc"} Dec 01 19:38:28 crc kubenswrapper[4888]: I1201 19:38:28.408239 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gcn8p"] Dec 01 19:38:28 crc kubenswrapper[4888]: W1201 19:38:28.417026 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42ea68bc_ce83_490b_89ab_30ac5124fb9e.slice/crio-7ac261ed5301fb1da151f136ad18f9ceec264c04dd96359d67a6daba5bbfa9be WatchSource:0}: Error finding container 7ac261ed5301fb1da151f136ad18f9ceec264c04dd96359d67a6daba5bbfa9be: Status 404 returned error can't find the container with id 7ac261ed5301fb1da151f136ad18f9ceec264c04dd96359d67a6daba5bbfa9be Dec 01 19:38:29 crc kubenswrapper[4888]: I1201 19:38:29.398235 4888 generic.go:334] "Generic (PLEG): container finished" podID="a1f98748-d4c4-4e86-93a3-5ebe405250ee" containerID="7e22eaf94b01e20bdf96862fb39e686dfbba7d69135f796a88ceb6c543cd389d" exitCode=0 Dec 01 19:38:29 crc kubenswrapper[4888]: I1201 19:38:29.398301 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jp5pm" event={"ID":"a1f98748-d4c4-4e86-93a3-5ebe405250ee","Type":"ContainerDied","Data":"7e22eaf94b01e20bdf96862fb39e686dfbba7d69135f796a88ceb6c543cd389d"} Dec 01 19:38:29 crc kubenswrapper[4888]: I1201 19:38:29.399416 4888 generic.go:334] "Generic (PLEG): container finished" podID="42ea68bc-ce83-490b-89ab-30ac5124fb9e" 
containerID="8e7997c0caad1a552fe2332fd0d8a8db915e53e846e93b0eca9b75466c8c9ff7" exitCode=0 Dec 01 19:38:29 crc kubenswrapper[4888]: I1201 19:38:29.399439 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcn8p" event={"ID":"42ea68bc-ce83-490b-89ab-30ac5124fb9e","Type":"ContainerDied","Data":"8e7997c0caad1a552fe2332fd0d8a8db915e53e846e93b0eca9b75466c8c9ff7"} Dec 01 19:38:29 crc kubenswrapper[4888]: I1201 19:38:29.399457 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcn8p" event={"ID":"42ea68bc-ce83-490b-89ab-30ac5124fb9e","Type":"ContainerStarted","Data":"7ac261ed5301fb1da151f136ad18f9ceec264c04dd96359d67a6daba5bbfa9be"} Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.272745 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vvwwp"] Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.274404 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.277719 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.282739 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvwwp"] Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.364478 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-catalog-content\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.364546 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw7rx\" (UniqueName: \"kubernetes.io/projected/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-kube-api-access-sw7rx\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.364574 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-utilities\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.405709 4888 generic.go:334] "Generic (PLEG): container finished" podID="42ea68bc-ce83-490b-89ab-30ac5124fb9e" containerID="6ba1ccdf0b103e60b60fc14ec034e87f8ef323cd00b52a222917eab0edecec33" exitCode=0 Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.405776 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcn8p" event={"ID":"42ea68bc-ce83-490b-89ab-30ac5124fb9e","Type":"ContainerDied","Data":"6ba1ccdf0b103e60b60fc14ec034e87f8ef323cd00b52a222917eab0edecec33"} Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.465527 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-catalog-content\") pod 
\"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.465591 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw7rx\" (UniqueName: \"kubernetes.io/projected/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-kube-api-access-sw7rx\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.465610 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-utilities\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.466039 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-utilities\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.469116 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-catalog-content\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.470320 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5ntwc"] Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.472049 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.475266 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.478088 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5ntwc"] Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.492503 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw7rx\" (UniqueName: \"kubernetes.io/projected/3249d3f3-6a27-4acd-8d8b-4c5360dc7f43-kube-api-access-sw7rx\") pod \"certified-operators-vvwwp\" (UID: \"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43\") " pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.566488 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-catalog-content\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.567005 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8v7v\" (UniqueName: \"kubernetes.io/projected/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-kube-api-access-p8v7v\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.567139 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-utilities\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.668844 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8v7v\" (UniqueName: \"kubernetes.io/projected/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-kube-api-access-p8v7v\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.668891 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-utilities\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.668921 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-catalog-content\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.669382 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-catalog-content\") pod 
\"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.670237 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-utilities\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.688748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8v7v\" (UniqueName: \"kubernetes.io/projected/8c488b93-cfe9-4b9e-9299-1e2b011e84bc-kube-api-access-p8v7v\") pod \"community-operators-5ntwc\" (UID: \"8c488b93-cfe9-4b9e-9299-1e2b011e84bc\") " pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.704442 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:30 crc kubenswrapper[4888]: I1201 19:38:30.813561 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.140941 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvwwp"] Dec 01 19:38:31 crc kubenswrapper[4888]: W1201 19:38:31.146836 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3249d3f3_6a27_4acd_8d8b_4c5360dc7f43.slice/crio-85389e4b4ba42ca800de6a174d6d7621b4f70d567c351c5c85fd7db91b64f36a WatchSource:0}: Error finding container 85389e4b4ba42ca800de6a174d6d7621b4f70d567c351c5c85fd7db91b64f36a: Status 404 returned error can't find the container with id 85389e4b4ba42ca800de6a174d6d7621b4f70d567c351c5c85fd7db91b64f36a Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.214247 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5ntwc"] Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.413053 4888 generic.go:334] "Generic (PLEG): container finished" podID="3249d3f3-6a27-4acd-8d8b-4c5360dc7f43" containerID="ccf24b398c4aeb04dc0a1a7cf103ac650e1a101a3e6a9432414b0063e740306e" exitCode=0 Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.413141 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvwwp" event={"ID":"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43","Type":"ContainerDied","Data":"ccf24b398c4aeb04dc0a1a7cf103ac650e1a101a3e6a9432414b0063e740306e"} Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.413196 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvwwp" event={"ID":"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43","Type":"ContainerStarted","Data":"85389e4b4ba42ca800de6a174d6d7621b4f70d567c351c5c85fd7db91b64f36a"} Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.417618 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gcn8p" event={"ID":"42ea68bc-ce83-490b-89ab-30ac5124fb9e","Type":"ContainerStarted","Data":"70909f40da13eaf98fed72025f0d89cb199e80af86f04a97d63f7ec0586d3d9a"} Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.422169 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-5ntwc" event={"ID":"8c488b93-cfe9-4b9e-9299-1e2b011e84bc","Type":"ContainerStarted","Data":"aa73d6938552f453cabc2c24be4017337c1efe810d79570d624570537b275274"} Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.424478 4888 generic.go:334] "Generic (PLEG): container finished" podID="a1f98748-d4c4-4e86-93a3-5ebe405250ee" containerID="3251259744ef8116867d2dd32a9d0c95ea7d17ab2ee790d45c2a5d9bf5e9cccb" exitCode=0 Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.424532 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jp5pm" event={"ID":"a1f98748-d4c4-4e86-93a3-5ebe405250ee","Type":"ContainerDied","Data":"3251259744ef8116867d2dd32a9d0c95ea7d17ab2ee790d45c2a5d9bf5e9cccb"} Dec 01 19:38:31 crc kubenswrapper[4888]: I1201 19:38:31.483680 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gcn8p" podStartSLOduration=3.038677762 podStartE2EDuration="4.483651022s" podCreationTimestamp="2025-12-01 19:38:27 +0000 UTC" firstStartedPulling="2025-12-01 19:38:29.400780111 +0000 UTC m=+309.271810025" lastFinishedPulling="2025-12-01 19:38:30.845753371 +0000 UTC m=+310.716783285" observedRunningTime="2025-12-01 19:38:31.480347647 +0000 UTC m=+311.351377561" watchObservedRunningTime="2025-12-01 19:38:31.483651022 +0000 UTC m=+311.354680936" Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.442751 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvwwp" event={"ID":"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43","Type":"ContainerStarted","Data":"b30cd0f0ac360a2a01c3c6d53e3bb64e11b89d90d7bff7fbb7911ce0769b36c9"} Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.445343 4888 generic.go:334] "Generic (PLEG): container finished" podID="8c488b93-cfe9-4b9e-9299-1e2b011e84bc" containerID="fdd0b894c71149372e5d1412fc4dac414257967a0a4b7453e3251fb1a081ac3d" exitCode=0 Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.445484 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntwc" event={"ID":"8c488b93-cfe9-4b9e-9299-1e2b011e84bc","Type":"ContainerDied","Data":"fdd0b894c71149372e5d1412fc4dac414257967a0a4b7453e3251fb1a081ac3d"} Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.461449 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jp5pm" event={"ID":"a1f98748-d4c4-4e86-93a3-5ebe405250ee","Type":"ContainerStarted","Data":"1a34dca3f92dc0e2a3e0d77c49d9ceb1dd6aa7e510cb7bcd0a7a4e6570f5d3d7"} Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.489166 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jp5pm" podStartSLOduration=3.061949343 podStartE2EDuration="5.489138152s" podCreationTimestamp="2025-12-01 19:38:27 +0000 UTC" firstStartedPulling="2025-12-01 19:38:29.399958997 +0000 UTC m=+309.270988911" lastFinishedPulling="2025-12-01 19:38:31.827147806 +0000 UTC m=+311.698177720" observedRunningTime="2025-12-01 19:38:32.485168937 +0000 UTC m=+312.356198871" watchObservedRunningTime="2025-12-01 19:38:32.489138152 +0000 UTC m=+312.360168066" Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.722889 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.723328 4888 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" podUID="ff4272eb-f24b-418c-b389-5e9444e1e18e" containerName="controller-manager" containerID="cri-o://9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949" gracePeriod=30 Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.743044 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:32 crc kubenswrapper[4888]: I1201 19:38:32.743279 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" podUID="f19b1a74-e044-4e34-b59a-0aab06417e9f" containerName="route-controller-manager" containerID="cri-o://47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51" gracePeriod=30 Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.310137 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.317372 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.409926 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config\") pod \"ff4272eb-f24b-418c-b389-5e9444e1e18e\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410061 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca\") pod \"ff4272eb-f24b-418c-b389-5e9444e1e18e\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410097 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert\") pod \"f19b1a74-e044-4e34-b59a-0aab06417e9f\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410126 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k76t\" (UniqueName: \"kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t\") pod \"f19b1a74-e044-4e34-b59a-0aab06417e9f\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410164 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dg7l\" (UniqueName: \"kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l\") pod \"ff4272eb-f24b-418c-b389-5e9444e1e18e\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410240 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert\") pod \"ff4272eb-f24b-418c-b389-5e9444e1e18e\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410277 4888 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles\") pod \"ff4272eb-f24b-418c-b389-5e9444e1e18e\" (UID: \"ff4272eb-f24b-418c-b389-5e9444e1e18e\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410294 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca\") pod \"f19b1a74-e044-4e34-b59a-0aab06417e9f\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.410332 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config\") pod \"f19b1a74-e044-4e34-b59a-0aab06417e9f\" (UID: \"f19b1a74-e044-4e34-b59a-0aab06417e9f\") " Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.411861 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config" (OuterVolumeSpecName: "config") pod "f19b1a74-e044-4e34-b59a-0aab06417e9f" (UID: "f19b1a74-e044-4e34-b59a-0aab06417e9f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.412403 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca" (OuterVolumeSpecName: "client-ca") pod "ff4272eb-f24b-418c-b389-5e9444e1e18e" (UID: "ff4272eb-f24b-418c-b389-5e9444e1e18e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.413073 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config" (OuterVolumeSpecName: "config") pod "ff4272eb-f24b-418c-b389-5e9444e1e18e" (UID: "ff4272eb-f24b-418c-b389-5e9444e1e18e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.413673 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "ff4272eb-f24b-418c-b389-5e9444e1e18e" (UID: "ff4272eb-f24b-418c-b389-5e9444e1e18e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.417837 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f19b1a74-e044-4e34-b59a-0aab06417e9f" (UID: "f19b1a74-e044-4e34-b59a-0aab06417e9f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.418257 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ff4272eb-f24b-418c-b389-5e9444e1e18e" (UID: "ff4272eb-f24b-418c-b389-5e9444e1e18e"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.418354 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l" (OuterVolumeSpecName: "kube-api-access-7dg7l") pod "ff4272eb-f24b-418c-b389-5e9444e1e18e" (UID: "ff4272eb-f24b-418c-b389-5e9444e1e18e"). InnerVolumeSpecName "kube-api-access-7dg7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.418486 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca" (OuterVolumeSpecName: "client-ca") pod "f19b1a74-e044-4e34-b59a-0aab06417e9f" (UID: "f19b1a74-e044-4e34-b59a-0aab06417e9f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.422268 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t" (OuterVolumeSpecName: "kube-api-access-2k76t") pod "f19b1a74-e044-4e34-b59a-0aab06417e9f" (UID: "f19b1a74-e044-4e34-b59a-0aab06417e9f"). InnerVolumeSpecName "kube-api-access-2k76t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.466050 4888 generic.go:334] "Generic (PLEG): container finished" podID="ff4272eb-f24b-418c-b389-5e9444e1e18e" containerID="9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949" exitCode=0 Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.466248 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" event={"ID":"ff4272eb-f24b-418c-b389-5e9444e1e18e","Type":"ContainerDied","Data":"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949"} Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.466297 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" event={"ID":"ff4272eb-f24b-418c-b389-5e9444e1e18e","Type":"ContainerDied","Data":"40b0d2d4df33046d2a292950e6d98d6ee441d2ca95cc1357d4dbf1893f2cd700"} Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.466322 4888 scope.go:117] "RemoveContainer" containerID="9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.466338 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.475559 4888 generic.go:334] "Generic (PLEG): container finished" podID="f19b1a74-e044-4e34-b59a-0aab06417e9f" containerID="47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51" exitCode=0 Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.475745 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" event={"ID":"f19b1a74-e044-4e34-b59a-0aab06417e9f","Type":"ContainerDied","Data":"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51"} Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.475806 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.475836 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb" event={"ID":"f19b1a74-e044-4e34-b59a-0aab06417e9f","Type":"ContainerDied","Data":"b37c5893732d57ef30751c724a579e3d6d5752f65b644b031f604e2f94932192"} Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.480056 4888 generic.go:334] "Generic (PLEG): container finished" podID="3249d3f3-6a27-4acd-8d8b-4c5360dc7f43" containerID="b30cd0f0ac360a2a01c3c6d53e3bb64e11b89d90d7bff7fbb7911ce0769b36c9" exitCode=0 Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.481208 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvwwp" event={"ID":"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43","Type":"ContainerDied","Data":"b30cd0f0ac360a2a01c3c6d53e3bb64e11b89d90d7bff7fbb7911ce0769b36c9"} Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.490649 4888 scope.go:117] "RemoveContainer" containerID="9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949" Dec 01 19:38:33 crc kubenswrapper[4888]: E1201 19:38:33.491090 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949\": container with ID starting with 9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949 not found: ID does not exist" containerID="9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.491147 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949"} err="failed to get container status \"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949\": rpc error: code = NotFound desc = could not find container \"9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949\": container with ID starting with 9ad104490d34f44c42bf454e37e5d736b58959acd92c0f70c25bbf9236815949 not found: ID does not exist" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.491180 4888 scope.go:117] "RemoveContainer" containerID="47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517404 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dg7l\" (UniqueName: \"kubernetes.io/projected/ff4272eb-f24b-418c-b389-5e9444e1e18e-kube-api-access-7dg7l\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517479 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff4272eb-f24b-418c-b389-5e9444e1e18e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517496 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517510 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc 
kubenswrapper[4888]: I1201 19:38:33.517521 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f19b1a74-e044-4e34-b59a-0aab06417e9f-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517532 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517541 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff4272eb-f24b-418c-b389-5e9444e1e18e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517551 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f19b1a74-e044-4e34-b59a-0aab06417e9f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.517561 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k76t\" (UniqueName: \"kubernetes.io/projected/f19b1a74-e044-4e34-b59a-0aab06417e9f-kube-api-access-2k76t\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.518313 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.520496 4888 scope.go:117] "RemoveContainer" containerID="47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51" Dec 01 19:38:33 crc kubenswrapper[4888]: E1201 19:38:33.521689 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51\": container with ID starting with 47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51 not found: ID does not exist" containerID="47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.521741 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51"} err="failed to get container status \"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51\": rpc error: code = NotFound desc = could not find container \"47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51\": container with ID starting with 47c3ccf948aad25e779e0700fc877c769cd5b09db8a30ae72340b3e2b92f0b51 not found: ID does not exist" Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.528771 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7f47dcb4b4-nxs4l"] Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.540367 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:33 crc kubenswrapper[4888]: I1201 19:38:33.546221 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-555cb4d5dd-2tgnb"] Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.459756 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f19b1a74-e044-4e34-b59a-0aab06417e9f" 
path="/var/lib/kubelet/pods/f19b1a74-e044-4e34-b59a-0aab06417e9f/volumes" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.460824 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff4272eb-f24b-418c-b389-5e9444e1e18e" path="/var/lib/kubelet/pods/ff4272eb-f24b-418c-b389-5e9444e1e18e/volumes" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.487310 4888 generic.go:334] "Generic (PLEG): container finished" podID="8c488b93-cfe9-4b9e-9299-1e2b011e84bc" containerID="532747aa38e5587bf7da97ff79f06bb456b1904ab47915cd56df21d06539e91d" exitCode=0 Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.487445 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntwc" event={"ID":"8c488b93-cfe9-4b9e-9299-1e2b011e84bc","Type":"ContainerDied","Data":"532747aa38e5587bf7da97ff79f06bb456b1904ab47915cd56df21d06539e91d"} Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.491425 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvwwp" event={"ID":"3249d3f3-6a27-4acd-8d8b-4c5360dc7f43","Type":"ContainerStarted","Data":"8a451979964de0ce3b083f8329b54514d97437ad1ae3f1266836341e182685fb"} Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.527758 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vvwwp" podStartSLOduration=1.7535988059999998 podStartE2EDuration="4.527725376s" podCreationTimestamp="2025-12-01 19:38:30 +0000 UTC" firstStartedPulling="2025-12-01 19:38:31.414813698 +0000 UTC m=+311.285843612" lastFinishedPulling="2025-12-01 19:38:34.188940258 +0000 UTC m=+314.059970182" observedRunningTime="2025-12-01 19:38:34.524786381 +0000 UTC m=+314.395816295" watchObservedRunningTime="2025-12-01 19:38:34.527725376 +0000 UTC m=+314.398755300" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.756857 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"] Dec 01 19:38:34 crc kubenswrapper[4888]: E1201 19:38:34.757278 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19b1a74-e044-4e34-b59a-0aab06417e9f" containerName="route-controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.757300 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19b1a74-e044-4e34-b59a-0aab06417e9f" containerName="route-controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: E1201 19:38:34.757320 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4272eb-f24b-418c-b389-5e9444e1e18e" containerName="controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.757328 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4272eb-f24b-418c-b389-5e9444e1e18e" containerName="controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.757462 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19b1a74-e044-4e34-b59a-0aab06417e9f" containerName="route-controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.757475 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4272eb-f24b-418c-b389-5e9444e1e18e" containerName="controller-manager" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.758012 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.761269 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.761653 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.761732 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.761957 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.762029 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z"] Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.762708 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.762986 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.763693 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.766869 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.767165 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.767459 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.767627 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.767871 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.768020 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.769477 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"] Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.772963 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.804877 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z"] Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834473 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h7w5\" (UniqueName: 
\"kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834534 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834556 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/451e2087-6482-4759-ac9f-681db2305681-serving-cert\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834757 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834837 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2f4t\" (UniqueName: \"kubernetes.io/projected/451e2087-6482-4759-ac9f-681db2305681-kube-api-access-d2f4t\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834860 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.834931 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-client-ca\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.835009 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-config\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.835038 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935720 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-config\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935778 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935822 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h7w5\" (UniqueName: \"kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935846 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935868 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/451e2087-6482-4759-ac9f-681db2305681-serving-cert\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935911 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935934 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2f4t\" (UniqueName: \"kubernetes.io/projected/451e2087-6482-4759-ac9f-681db2305681-kube-api-access-d2f4t\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.935955 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: 
\"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.936000 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-client-ca\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.937013 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.937072 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-config\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.937405 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.937652 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.938096 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/451e2087-6482-4759-ac9f-681db2305681-client-ca\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.946332 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.949628 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/451e2087-6482-4759-ac9f-681db2305681-serving-cert\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.952748 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d2f4t\" (UniqueName: \"kubernetes.io/projected/451e2087-6482-4759-ac9f-681db2305681-kube-api-access-d2f4t\") pod \"route-controller-manager-7fb99f6db9-9ql4z\" (UID: \"451e2087-6482-4759-ac9f-681db2305681\") " pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:34 crc kubenswrapper[4888]: I1201 19:38:34.956815 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h7w5\" (UniqueName: \"kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5\") pod \"controller-manager-74c49c658f-s5z6d\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:35 crc kubenswrapper[4888]: I1201 19:38:35.086068 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:35 crc kubenswrapper[4888]: I1201 19:38:35.102492 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:35 crc kubenswrapper[4888]: I1201 19:38:35.364219 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z"] Dec 01 19:38:35 crc kubenswrapper[4888]: I1201 19:38:35.501748 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" event={"ID":"451e2087-6482-4759-ac9f-681db2305681","Type":"ContainerStarted","Data":"9fed342a97b2b2aca5c6737e06f34fe68f2f0f64cd2629fcb620ac177cd3bd8d"} Dec 01 19:38:35 crc kubenswrapper[4888]: I1201 19:38:35.516866 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"] Dec 01 19:38:35 crc kubenswrapper[4888]: W1201 19:38:35.526201 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb9162d4_5d5d_4f4f_be6b_9702e20c4eca.slice/crio-c357a6cec7295d728983c290a49a588313afdc7aae79cab576d1057110ac3844 WatchSource:0}: Error finding container c357a6cec7295d728983c290a49a588313afdc7aae79cab576d1057110ac3844: Status 404 returned error can't find the container with id c357a6cec7295d728983c290a49a588313afdc7aae79cab576d1057110ac3844 Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.508325 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5ntwc" event={"ID":"8c488b93-cfe9-4b9e-9299-1e2b011e84bc","Type":"ContainerStarted","Data":"1a2793ed882f9bfdf23c5fa6139b0bf6db2e2635a2753d01f93761f3d2178065"} Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.510797 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" event={"ID":"451e2087-6482-4759-ac9f-681db2305681","Type":"ContainerStarted","Data":"a0296bdba84b32bcaeda4dcd8032a1057c04931aea7f8721f1813f607ccb3846"} Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.511124 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.512527 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" event={"ID":"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca","Type":"ContainerStarted","Data":"d6cb8387444ee2c578d3be628424fe306810a6622d1567d3293e946872666fc6"} Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.512559 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" event={"ID":"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca","Type":"ContainerStarted","Data":"c357a6cec7295d728983c290a49a588313afdc7aae79cab576d1057110ac3844"} Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.512755 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.516003 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.516813 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.527791 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5ntwc" podStartSLOduration=3.435384633 podStartE2EDuration="6.52777138s" podCreationTimestamp="2025-12-01 19:38:30 +0000 UTC" firstStartedPulling="2025-12-01 19:38:32.447319036 +0000 UTC m=+312.318348940" lastFinishedPulling="2025-12-01 19:38:35.539705773 +0000 UTC m=+315.410735687" observedRunningTime="2025-12-01 19:38:36.525051392 +0000 UTC m=+316.396081316" watchObservedRunningTime="2025-12-01 19:38:36.52777138 +0000 UTC m=+316.398801294" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.541410 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7fb99f6db9-9ql4z" podStartSLOduration=4.541385683 podStartE2EDuration="4.541385683s" podCreationTimestamp="2025-12-01 19:38:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:38:36.540206669 +0000 UTC m=+316.411236583" watchObservedRunningTime="2025-12-01 19:38:36.541385683 +0000 UTC m=+316.412415597" Dec 01 19:38:36 crc kubenswrapper[4888]: I1201 19:38:36.561549 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" podStartSLOduration=4.561526713 podStartE2EDuration="4.561526713s" podCreationTimestamp="2025-12-01 19:38:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:38:36.558397943 +0000 UTC m=+316.429427857" watchObservedRunningTime="2025-12-01 19:38:36.561526713 +0000 UTC m=+316.432556627" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.037418 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.037885 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.080803 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.196370 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.196428 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.229293 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.560650 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gcn8p" Dec 01 19:38:38 crc kubenswrapper[4888]: I1201 19:38:38.568519 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jp5pm" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.705344 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.705746 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.744242 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.814190 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.814259 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:40 crc kubenswrapper[4888]: I1201 19:38:40.857673 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:41 crc kubenswrapper[4888]: I1201 19:38:41.587385 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vvwwp" Dec 01 19:38:41 crc kubenswrapper[4888]: I1201 19:38:41.589489 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5ntwc" Dec 01 19:38:43 crc kubenswrapper[4888]: I1201 19:38:43.027386 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"] Dec 01 19:38:43 crc kubenswrapper[4888]: I1201 19:38:43.027632 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerName="controller-manager" containerID="cri-o://d6cb8387444ee2c578d3be628424fe306810a6622d1567d3293e946872666fc6" gracePeriod=30 Dec 01 19:38:44 crc kubenswrapper[4888]: I1201 19:38:44.561926 4888 generic.go:334] "Generic (PLEG): container finished" podID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerID="d6cb8387444ee2c578d3be628424fe306810a6622d1567d3293e946872666fc6" exitCode=0 Dec 01 19:38:44 crc kubenswrapper[4888]: I1201 19:38:44.562071 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" 
event={"ID":"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca","Type":"ContainerDied","Data":"d6cb8387444ee2c578d3be628424fe306810a6622d1567d3293e946872666fc6"} Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.099715 4888 patch_prober.go:28] interesting pod/controller-manager-74c49c658f-s5z6d container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body= Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.099780 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.542993 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.565404 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"] Dec 01 19:38:45 crc kubenswrapper[4888]: E1201 19:38:45.565599 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerName="controller-manager" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.565612 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerName="controller-manager" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.565714 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" containerName="controller-manager" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.566063 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.568811 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" event={"ID":"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca","Type":"ContainerDied","Data":"c357a6cec7295d728983c290a49a588313afdc7aae79cab576d1057110ac3844"} Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.568864 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74c49c658f-s5z6d" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.568870 4888 scope.go:117] "RemoveContainer" containerID="d6cb8387444ee2c578d3be628424fe306810a6622d1567d3293e946872666fc6" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.586105 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"] Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604051 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles\") pod \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604149 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca\") pod \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604714 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" (UID: "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604732 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca" (OuterVolumeSpecName: "client-ca") pod "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" (UID: "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604886 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert\") pod \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.604989 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config\") pod \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.605599 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config" (OuterVolumeSpecName: "config") pod "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" (UID: "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.605675 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h7w5\" (UniqueName: \"kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5\") pod \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\" (UID: \"cb9162d4-5d5d-4f4f-be6b-9702e20c4eca\") " Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.606119 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.606140 4888 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.606153 4888 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.611844 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5" (OuterVolumeSpecName: "kube-api-access-4h7w5") pod "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" (UID: "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca"). InnerVolumeSpecName "kube-api-access-4h7w5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.611889 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" (UID: "cb9162d4-5d5d-4f4f-be6b-9702e20c4eca"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.706920 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdk95\" (UniqueName: \"kubernetes.io/projected/c867ae62-216c-4a1b-9a9a-ab52cbd76986-kube-api-access-fdk95\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.706971 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-proxy-ca-bundles\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.707007 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c867ae62-216c-4a1b-9a9a-ab52cbd76986-serving-cert\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.707172 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-client-ca\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.707264 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-config\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.707371 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h7w5\" (UniqueName: \"kubernetes.io/projected/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-kube-api-access-4h7w5\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.707392 4888 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.808852 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-client-ca\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.808906 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-config\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " 
pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.808964 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdk95\" (UniqueName: \"kubernetes.io/projected/c867ae62-216c-4a1b-9a9a-ab52cbd76986-kube-api-access-fdk95\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.808995 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-proxy-ca-bundles\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.809041 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c867ae62-216c-4a1b-9a9a-ab52cbd76986-serving-cert\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.810101 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-proxy-ca-bundles\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.810373 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-config\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.810960 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c867ae62-216c-4a1b-9a9a-ab52cbd76986-client-ca\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.819210 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c867ae62-216c-4a1b-9a9a-ab52cbd76986-serving-cert\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.825021 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdk95\" (UniqueName: \"kubernetes.io/projected/c867ae62-216c-4a1b-9a9a-ab52cbd76986-kube-api-access-fdk95\") pod \"controller-manager-7765dd68cf-6hp9g\" (UID: \"c867ae62-216c-4a1b-9a9a-ab52cbd76986\") " pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.896971 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"]
Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.900151 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-74c49c658f-s5z6d"]
Dec 01 19:38:45 crc kubenswrapper[4888]: I1201 19:38:45.901669 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.094722 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"]
Dec 01 19:38:46 crc kubenswrapper[4888]: W1201 19:38:46.103980 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc867ae62_216c_4a1b_9a9a_ab52cbd76986.slice/crio-86b760554a8aa5bf16d38a618a9b6ee8a1c0e755959036ecc96f8be538dae8c2 WatchSource:0}: Error finding container 86b760554a8aa5bf16d38a618a9b6ee8a1c0e755959036ecc96f8be538dae8c2: Status 404 returned error can't find the container with id 86b760554a8aa5bf16d38a618a9b6ee8a1c0e755959036ecc96f8be538dae8c2
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.457924 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb9162d4-5d5d-4f4f-be6b-9702e20c4eca" path="/var/lib/kubelet/pods/cb9162d4-5d5d-4f4f-be6b-9702e20c4eca/volumes"
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.575571 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" event={"ID":"c867ae62-216c-4a1b-9a9a-ab52cbd76986","Type":"ContainerStarted","Data":"582a449b8dc2b5455c0593615f6667bd269ff985f813502404ac0dfde4760f1f"}
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.575622 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" event={"ID":"c867ae62-216c-4a1b-9a9a-ab52cbd76986","Type":"ContainerStarted","Data":"86b760554a8aa5bf16d38a618a9b6ee8a1c0e755959036ecc96f8be538dae8c2"}
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.577336 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.587151 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g"
Dec 01 19:38:46 crc kubenswrapper[4888]: I1201 19:38:46.599536 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7765dd68cf-6hp9g" podStartSLOduration=3.599514619 podStartE2EDuration="3.599514619s" podCreationTimestamp="2025-12-01 19:38:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:38:46.596697908 +0000 UTC m=+326.467727832" watchObservedRunningTime="2025-12-01 19:38:46.599514619 +0000 UTC m=+326.470544533"
Dec 01 19:38:50 crc kubenswrapper[4888]: I1201 19:38:50.037981 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:38:50 crc kubenswrapper[4888]: I1201 19:38:50.038669 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.430133 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-56zwn"]
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.431704 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.446832 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-56zwn"]
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.513937 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-registry-certificates\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514258 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrdnv\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-kube-api-access-hrdnv\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514287 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-bound-sa-token\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514346 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37b2a037-1613-4f83-be61-580c874d6be2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514374 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514519 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-registry-tls\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514601 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37b2a037-1613-4f83-be61-580c874d6be2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.514710 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-trusted-ca\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.538049 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.616491 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-registry-certificates\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.616557 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrdnv\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-kube-api-access-hrdnv\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.616577 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-bound-sa-token\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.616614 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37b2a037-1613-4f83-be61-580c874d6be2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.617356 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-registry-tls\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.617485 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37b2a037-1613-4f83-be61-580c874d6be2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.617566 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-trusted-ca\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.617857 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/37b2a037-1613-4f83-be61-580c874d6be2-ca-trust-extracted\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.618633 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-trusted-ca\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.618856 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/37b2a037-1613-4f83-be61-580c874d6be2-registry-certificates\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.621629 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-registry-tls\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.621831 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/37b2a037-1613-4f83-be61-580c874d6be2-installation-pull-secrets\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.631438 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-bound-sa-token\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.632431 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrdnv\" (UniqueName: \"kubernetes.io/projected/37b2a037-1613-4f83-be61-580c874d6be2-kube-api-access-hrdnv\") pod \"image-registry-66df7c8f76-56zwn\" (UID: \"37b2a037-1613-4f83-be61-580c874d6be2\") " pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:18 crc kubenswrapper[4888]: I1201 19:39:18.749373 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:19 crc kubenswrapper[4888]: I1201 19:39:19.148818 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-56zwn"]
Dec 01 19:39:19 crc kubenswrapper[4888]: I1201 19:39:19.749518 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn" event={"ID":"37b2a037-1613-4f83-be61-580c874d6be2","Type":"ContainerStarted","Data":"b8e1b38b53238eca5904dfb9a5bed7113effed4cd699046993cfe54aac388afb"}
Dec 01 19:39:19 crc kubenswrapper[4888]: I1201 19:39:19.749563 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn" event={"ID":"37b2a037-1613-4f83-be61-580c874d6be2","Type":"ContainerStarted","Data":"1a322474ee2410899da695889cb3c0bd3f4cd88cff63501b519f54b78efefb98"}
Dec 01 19:39:19 crc kubenswrapper[4888]: I1201 19:39:19.749670 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:19 crc kubenswrapper[4888]: I1201 19:39:19.765456 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn" podStartSLOduration=1.765435216 podStartE2EDuration="1.765435216s" podCreationTimestamp="2025-12-01 19:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:39:19.764252041 +0000 UTC m=+359.635281955" watchObservedRunningTime="2025-12-01 19:39:19.765435216 +0000 UTC m=+359.636465130"
Dec 01 19:39:20 crc kubenswrapper[4888]: I1201 19:39:20.038136 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:39:20 crc kubenswrapper[4888]: I1201 19:39:20.038543 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:39:38 crc kubenswrapper[4888]: I1201 19:39:38.754636 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-56zwn"
Dec 01 19:39:38 crc kubenswrapper[4888]: I1201 19:39:38.814129 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"]
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.038108 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.039355 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.039419 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp"
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.040136 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.040355 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055" gracePeriod=600
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.907796 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055" exitCode=0
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.907870 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055"}
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.908287 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1"}
Dec 01 19:39:50 crc kubenswrapper[4888]: I1201 19:39:50.908342 4888 scope.go:117] "RemoveContainer" containerID="db35c42d0a6625cee0e80d70bd287b3b18ee926c9f912ca1c6aa470ec002d252"
Dec 01 19:40:03 crc kubenswrapper[4888]: I1201 19:40:03.853568 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" podUID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" containerName="registry" containerID="cri-o://8bd6c7a7a72d154d5b39f0af535ba41689615ee51ac615efbc4319561774bd85" gracePeriod=30
Dec 01 19:40:03 crc kubenswrapper[4888]: I1201 19:40:03.978263 4888 generic.go:334] "Generic (PLEG): container finished" podID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" containerID="8bd6c7a7a72d154d5b39f0af535ba41689615ee51ac615efbc4319561774bd85" exitCode=0
Dec 01 19:40:03 crc kubenswrapper[4888]: I1201 19:40:03.978325 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" event={"ID":"284ef7ed-219e-48bd-8b60-9d16ae856c9e","Type":"ContainerDied","Data":"8bd6c7a7a72d154d5b39f0af535ba41689615ee51ac615efbc4319561774bd85"}
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.234760 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285203 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285345 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285429 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285466 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285763 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285796 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285818 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4ft4\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.285843 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets\") pod \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\" (UID: \"284ef7ed-219e-48bd-8b60-9d16ae856c9e\") "
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.286763 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.287714 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.297280 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.297436 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.298174 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.298601 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.298949 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4" (OuterVolumeSpecName: "kube-api-access-t4ft4") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "kube-api-access-t4ft4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.320425 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "284ef7ed-219e-48bd-8b60-9d16ae856c9e" (UID: "284ef7ed-219e-48bd-8b60-9d16ae856c9e"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386869 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386924 4888 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/284ef7ed-219e-48bd-8b60-9d16ae856c9e-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386937 4888 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386945 4888 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-certificates\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386956 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4ft4\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-kube-api-access-t4ft4\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386966 4888 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/284ef7ed-219e-48bd-8b60-9d16ae856c9e-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.386973 4888 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/284ef7ed-219e-48bd-8b60-9d16ae856c9e-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.987408 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2" event={"ID":"284ef7ed-219e-48bd-8b60-9d16ae856c9e","Type":"ContainerDied","Data":"5fcb0b1894a3e8d250a23fefa74f4181fa650b9e77996503d5ec844cbf02749c"}
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.987499 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-98xb2"
Dec 01 19:40:04 crc kubenswrapper[4888]: I1201 19:40:04.988120 4888 scope.go:117] "RemoveContainer" containerID="8bd6c7a7a72d154d5b39f0af535ba41689615ee51ac615efbc4319561774bd85"
Dec 01 19:40:05 crc kubenswrapper[4888]: I1201 19:40:05.012787 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"]
Dec 01 19:40:05 crc kubenswrapper[4888]: I1201 19:40:05.016266 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-98xb2"]
Dec 01 19:40:06 crc kubenswrapper[4888]: I1201 19:40:06.461553 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" path="/var/lib/kubelet/pods/284ef7ed-219e-48bd-8b60-9d16ae856c9e/volumes"
Dec 01 19:41:50 crc kubenswrapper[4888]: I1201 19:41:50.038315 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:41:50 crc kubenswrapper[4888]: I1201 19:41:50.038876 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:42:20 crc kubenswrapper[4888]: I1201 19:42:20.038618 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:42:20 crc kubenswrapper[4888]: I1201 19:42:20.039123 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:42:50 crc kubenswrapper[4888]: I1201 19:42:50.037928 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:42:50 crc kubenswrapper[4888]: I1201 19:42:50.038444 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:42:50 crc kubenswrapper[4888]: I1201 19:42:50.038485 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp"
Dec 01 19:42:50 crc kubenswrapper[4888]: I1201 19:42:50.039985 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 19:42:50 crc kubenswrapper[4888]: I1201 19:42:50.040054 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1" gracePeriod=600
Dec 01 19:42:51 crc kubenswrapper[4888]: I1201 19:42:51.003102 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1" exitCode=0
Dec 01 19:42:51 crc kubenswrapper[4888]: I1201 19:42:51.003618 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1"}
Dec 01 19:42:51 crc kubenswrapper[4888]: I1201 19:42:51.003642 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d"}
Dec 01 19:42:51 crc kubenswrapper[4888]: I1201 19:42:51.003657 4888 scope.go:117] "RemoveContainer" containerID="ebde62c1277053ae0054da47d56a05cc594a3d11abcd1a107fd77646206c3055"
Dec 01 19:43:20 crc kubenswrapper[4888]: I1201 19:43:20.651888 4888 scope.go:117] "RemoveContainer" containerID="387e86d43628ec9b7d83f03e75f764c7495c11bf52fbb3dbd70d7f2a58bce712"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.526473 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-llsnr"]
Dec 01 19:44:20 crc kubenswrapper[4888]: E1201 19:44:20.527329 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" containerName="registry"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.527343 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" containerName="registry"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.527433 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="284ef7ed-219e-48bd-8b60-9d16ae856c9e" containerName="registry"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.527771 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.530056 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.530580 4888 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-l2kbn"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.536765 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.537340 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-td6nx"]
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.538009 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-td6nx"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.539693 4888 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-9gv7j"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.557064 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-td6nx"]
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.570094 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-llsnr"]
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.584933 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6mj44"]
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.586352 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.592658 4888 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-p8wc2"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.595447 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6mj44"]
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.599054 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s766f\" (UniqueName: \"kubernetes.io/projected/5695a357-bd5b-42c3-952b-f2be7e800dce-kube-api-access-s766f\") pod \"cert-manager-cainjector-7f985d654d-llsnr\" (UID: \"5695a357-bd5b-42c3-952b-f2be7e800dce\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.599164 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8xlz\" (UniqueName: \"kubernetes.io/projected/fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858-kube-api-access-l8xlz\") pod \"cert-manager-webhook-5655c58dd6-6mj44\" (UID: \"fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.599253 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2tjc\" (UniqueName: \"kubernetes.io/projected/68662703-dd1d-4a5f-8884-d79b491c4fe2-kube-api-access-x2tjc\") pod \"cert-manager-5b446d88c5-td6nx\" (UID: \"68662703-dd1d-4a5f-8884-d79b491c4fe2\") " pod="cert-manager/cert-manager-5b446d88c5-td6nx"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.700255 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2tjc\" (UniqueName: \"kubernetes.io/projected/68662703-dd1d-4a5f-8884-d79b491c4fe2-kube-api-access-x2tjc\") pod \"cert-manager-5b446d88c5-td6nx\" (UID: \"68662703-dd1d-4a5f-8884-d79b491c4fe2\") " pod="cert-manager/cert-manager-5b446d88c5-td6nx"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.700350 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s766f\" (UniqueName: \"kubernetes.io/projected/5695a357-bd5b-42c3-952b-f2be7e800dce-kube-api-access-s766f\") pod \"cert-manager-cainjector-7f985d654d-llsnr\" (UID: \"5695a357-bd5b-42c3-952b-f2be7e800dce\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.700393 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8xlz\" (UniqueName: \"kubernetes.io/projected/fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858-kube-api-access-l8xlz\") pod \"cert-manager-webhook-5655c58dd6-6mj44\" (UID: \"fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.722544 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8xlz\" (UniqueName: \"kubernetes.io/projected/fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858-kube-api-access-l8xlz\") pod \"cert-manager-webhook-5655c58dd6-6mj44\" (UID: \"fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.722830 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2tjc\" (UniqueName: \"kubernetes.io/projected/68662703-dd1d-4a5f-8884-d79b491c4fe2-kube-api-access-x2tjc\") pod \"cert-manager-5b446d88c5-td6nx\" (UID: \"68662703-dd1d-4a5f-8884-d79b491c4fe2\") " pod="cert-manager/cert-manager-5b446d88c5-td6nx"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.730154 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s766f\" (UniqueName: \"kubernetes.io/projected/5695a357-bd5b-42c3-952b-f2be7e800dce-kube-api-access-s766f\") pod \"cert-manager-cainjector-7f985d654d-llsnr\" (UID: \"5695a357-bd5b-42c3-952b-f2be7e800dce\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.856058 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.858866 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-td6nx"
Dec 01 19:44:20 crc kubenswrapper[4888]: I1201 19:44:20.907526 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.084097 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-td6nx"]
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.097863 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.124703 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-llsnr"]
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.364072 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6mj44"]
Dec 01 19:44:21 crc kubenswrapper[4888]: W1201 19:44:21.367966 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe6ae8a5_6bae_469d_a22f_6cbeb2cc3858.slice/crio-c2710ce48726c130d7433ea55e708016584f2611dc514d85261f30e0e4f2b411 WatchSource:0}: Error finding container c2710ce48726c130d7433ea55e708016584f2611dc514d85261f30e0e4f2b411: Status 404 returned error can't find the container with id c2710ce48726c130d7433ea55e708016584f2611dc514d85261f30e0e4f2b411
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.484846 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44" event={"ID":"fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858","Type":"ContainerStarted","Data":"c2710ce48726c130d7433ea55e708016584f2611dc514d85261f30e0e4f2b411"}
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.486293 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-td6nx" event={"ID":"68662703-dd1d-4a5f-8884-d79b491c4fe2","Type":"ContainerStarted","Data":"ab8a21641e08444759ec9faea3dcb556dc97e6b34f453ebecf3d8bdec843cd5b"}
Dec 01 19:44:21 crc kubenswrapper[4888]: I1201 19:44:21.487804 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr" event={"ID":"5695a357-bd5b-42c3-952b-f2be7e800dce","Type":"ContainerStarted","Data":"2de080d346dd73847dd309ca5845774c8c2a9a4d5d711f10b85c848a35ebe46c"}
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.506307 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-td6nx" event={"ID":"68662703-dd1d-4a5f-8884-d79b491c4fe2","Type":"ContainerStarted","Data":"2a2bbf585c47c99299c53ae8786593a80407f740aefc9088b34c264ba8fddfcc"}
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.507907 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr" event={"ID":"5695a357-bd5b-42c3-952b-f2be7e800dce","Type":"ContainerStarted","Data":"c579dd1ab9f4f04ac3a7f5756d256b02b89bd8dcd605053fb8be5c067e1d9613"}
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.509330 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44" event={"ID":"fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858","Type":"ContainerStarted","Data":"661dbfb76635948a793046e3e98a353dc38c1247537d714a93061e50fe5dca69"}
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.509475 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.520724 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-td6nx" podStartSLOduration=1.779638966 podStartE2EDuration="4.520707267s" podCreationTimestamp="2025-12-01 19:44:20 +0000 UTC" firstStartedPulling="2025-12-01 19:44:21.097653162 +0000 UTC m=+660.968683076" lastFinishedPulling="2025-12-01 19:44:23.838721463 +0000 UTC m=+663.709751377" observedRunningTime="2025-12-01 19:44:24.51946155 +0000 UTC m=+664.390491464" watchObservedRunningTime="2025-12-01 19:44:24.520707267 +0000 UTC m=+664.391737181"
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.563505 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44" podStartSLOduration=1.976355197 podStartE2EDuration="4.563488153s" podCreationTimestamp="2025-12-01 19:44:20 +0000 UTC" firstStartedPulling="2025-12-01 19:44:21.370499224 +0000 UTC m=+661.241529128" lastFinishedPulling="2025-12-01 19:44:23.95763217 +0000 UTC m=+663.828662084" observedRunningTime="2025-12-01 19:44:24.559517845 +0000 UTC m=+664.430547759" watchObservedRunningTime="2025-12-01 19:44:24.563488153 +0000 UTC m=+664.434518067"
Dec 01 19:44:24 crc kubenswrapper[4888]: I1201 19:44:24.564365 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-llsnr" podStartSLOduration=1.803573605 podStartE2EDuration="4.564355209s" podCreationTimestamp="2025-12-01 19:44:20 +0000 UTC" firstStartedPulling="2025-12-01 19:44:21.133457221 +0000 UTC m=+661.004487135" lastFinishedPulling="2025-12-01 19:44:23.894238825 +0000 UTC m=+663.765268739" observedRunningTime="2025-12-01 19:44:24.546882302 +0000 UTC m=+664.417912216" watchObservedRunningTime="2025-12-01 19:44:24.564355209 +0000 UTC m=+664.435385133"
Dec 01 19:44:30 crc kubenswrapper[4888]: I1201 19:44:30.913045 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-6mj44"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.054629 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f4wj6"]
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.054969 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-controller" containerID="cri-o://17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055313 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="sbdb" containerID="cri-o://402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055352 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="nbdb" containerID="cri-o://3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055380 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="northd" containerID="cri-o://3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055422 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055452 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-node" containerID="cri-o://51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.055488 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-acl-logging" containerID="cri-o://366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.096861 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller" containerID="cri-o://91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" gracePeriod=30
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.420707 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/3.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.424501 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovn-acl-logging/0.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.425780 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovn-controller/0.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.426407 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.471305 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-84dzf"]
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472452 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472485 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472506 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-acl-logging"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472514 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-acl-logging"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472531 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472538 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472556 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472566 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472578 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="nbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472589 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="nbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472606 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472613 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472629 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="northd"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472637 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="northd"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472646 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472658 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472667 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-node"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472675 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-node"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472684 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="sbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472692 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="sbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472712 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472719 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.472734 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kubecfg-setup"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.472741 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kubecfg-setup"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473083 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473096 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-acl-logging"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473110 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473118 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-node"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473129 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="sbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473144 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="nbdb"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473157 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="kube-rbac-proxy-ovn-metrics"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473175 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473198 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="northd"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473208 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovn-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.473691 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.473706 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.474202 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.474224 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerName="ovnkube-controller"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.478922 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.574610 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovnkube-controller/3.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.577120 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovn-acl-logging/0.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.577776 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-f4wj6_578ef97f-2ce3-405a-9f4e-fcaa5f98df07/ovn-controller/0.log"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579750 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579778 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579787 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579796 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579795 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579805 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579856 4888 scope.go:117] "RemoveContainer" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579860 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6"
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579890 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" exitCode=0
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579920 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" exitCode=143
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579929 4888 generic.go:334] "Generic (PLEG): container finished" podID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" exitCode=143
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579843 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579959 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579981 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.579997 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580012 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580026 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580039 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580048 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580056 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580063 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580071 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580078 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580086 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580093 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580103 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580114 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580123 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580130 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580137 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580144 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580152 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580158 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580165 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"}
Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580172 4888 pod_container_deletor.go:114] "Failed to issue the request
to remove container" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580194 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580208 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580221 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580229 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580238 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580245 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580253 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580260 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580267 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580276 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580285 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580292 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580301 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f4wj6" event={"ID":"578ef97f-2ce3-405a-9f4e-fcaa5f98df07","Type":"ContainerDied","Data":"b0311eeaa7a47871b5e23b4f6bbaed96b2b8aa682be4987282edb20a2c4e2d26"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 
19:44:33.580311 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580319 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580326 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580332 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580339 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580345 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580351 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580357 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580363 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.580369 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581341 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581394 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581418 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). 
InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581477 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581488 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581429 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581556 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581598 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581623 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581629 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581644 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581678 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581678 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581706 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581735 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581745 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket" (OuterVolumeSpecName: "log-socket") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581760 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581978 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.581989 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash" (OuterVolumeSpecName: "host-slash") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). 
InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582017 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582054 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582323 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582350 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbxc5\" (UniqueName: \"kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582368 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582404 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log" (OuterVolumeSpecName: "node-log") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582413 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582455 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582475 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582499 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582524 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582550 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582497 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582574 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config\") pod \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\" (UID: \"578ef97f-2ce3-405a-9f4e-fcaa5f98df07\") " Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582623 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/2.log" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582702 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582721 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-node-log\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582728 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582757 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-slash\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582784 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-log-socket\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582820 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-systemd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582841 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-var-lib-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582864 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582888 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65fl9\" (UniqueName: \"kubernetes.io/projected/1b2e0586-6957-4ede-b0cc-538dc8082425-kube-api-access-65fl9\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582907 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-bin\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582931 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-ovn\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582928 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.582988 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-script-lib\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583047 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-netd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583052 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583071 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-config\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583125 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-env-overrides\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583154 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-kubelet\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583177 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-systemd-units\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583215 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583259 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-etc-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583275 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b2e0586-6957-4ede-b0cc-538dc8082425-ovn-node-metrics-cert\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583296 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-netns\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583312 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583355 4888 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-node-log\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583364 4888 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583373 4888 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583382 4888 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583391 4888 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583399 4888 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583407 4888 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583415 4888 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583424 4888 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583431 4888 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583467 4888 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583475 4888 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583483 4888 
reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583492 4888 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-log-socket\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583500 4888 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583507 4888 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-slash\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.583516 4888 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.584082 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/1.log" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.584164 4888 generic.go:334] "Generic (PLEG): container finished" podID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" containerID="05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730" exitCode=2 Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.584256 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerDied","Data":"05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.584304 4888 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15"} Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.585239 4888 scope.go:117] "RemoveContainer" containerID="05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.585699 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hfpdh_openshift-multus(08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6)\"" pod="openshift-multus/multus-hfpdh" podUID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.588512 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5" (OuterVolumeSpecName: "kube-api-access-tbxc5") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "kube-api-access-tbxc5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.588679 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.600697 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "578ef97f-2ce3-405a-9f4e-fcaa5f98df07" (UID: "578ef97f-2ce3-405a-9f4e-fcaa5f98df07"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.611349 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.630789 4888 scope.go:117] "RemoveContainer" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.644579 4888 scope.go:117] "RemoveContainer" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.658196 4888 scope.go:117] "RemoveContainer" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.672110 4888 scope.go:117] "RemoveContainer" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.683175 4888 scope.go:117] "RemoveContainer" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684087 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b2e0586-6957-4ede-b0cc-538dc8082425-ovn-node-metrics-cert\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684126 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-etc-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684158 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-netns\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684178 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684226 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-etc-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684260 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-node-log\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684274 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-netns\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684288 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-slash\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684324 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-slash\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684325 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-log-socket\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684359 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-systemd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684378 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-var-lib-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684398 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 
19:44:33.684424 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65fl9\" (UniqueName: \"kubernetes.io/projected/1b2e0586-6957-4ede-b0cc-538dc8082425-kube-api-access-65fl9\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684527 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-bin\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684553 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-ovn\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684577 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-script-lib\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684619 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-netd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684648 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-config\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684681 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-env-overrides\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684719 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-kubelet\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684749 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-systemd-units\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684783 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684793 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684858 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbxc5\" (UniqueName: \"kubernetes.io/projected/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-kube-api-access-tbxc5\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684884 4888 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684900 4888 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/578ef97f-2ce3-405a-9f4e-fcaa5f98df07-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684936 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-kubelet\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.684970 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-systemd-units\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685001 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-run-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685159 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-script-lib\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685221 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-log-socket\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685246 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-systemd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685256 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-netd\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685293 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685298 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-env-overrides\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685360 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-node-log\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685267 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-var-lib-openvswitch\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685405 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-host-cni-bin\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685437 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1b2e0586-6957-4ede-b0cc-538dc8082425-run-ovn\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.685837 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1b2e0586-6957-4ede-b0cc-538dc8082425-ovnkube-config\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.687380 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1b2e0586-6957-4ede-b0cc-538dc8082425-ovn-node-metrics-cert\") pod \"ovnkube-node-84dzf\" (UID: 
\"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.696988 4888 scope.go:117] "RemoveContainer" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.702072 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65fl9\" (UniqueName: \"kubernetes.io/projected/1b2e0586-6957-4ede-b0cc-538dc8082425-kube-api-access-65fl9\") pod \"ovnkube-node-84dzf\" (UID: \"1b2e0586-6957-4ede-b0cc-538dc8082425\") " pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.709404 4888 scope.go:117] "RemoveContainer" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.721876 4888 scope.go:117] "RemoveContainer" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.733395 4888 scope.go:117] "RemoveContainer" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.733751 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: ID does not exist" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.733806 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} err="failed to get container status \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.733828 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.734120 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": container with ID starting with 71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa not found: ID does not exist" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.734171 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} err="failed to get container status \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": rpc error: code = NotFound desc = could not find container \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": container with ID starting with 71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 
19:44:33.734220 4888 scope.go:117] "RemoveContainer" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.734482 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": container with ID starting with 402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710 not found: ID does not exist" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.734506 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} err="failed to get container status \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": rpc error: code = NotFound desc = could not find container \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": container with ID starting with 402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.734521 4888 scope.go:117] "RemoveContainer" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.734736 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": container with ID starting with 3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1 not found: ID does not exist" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.734762 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} err="failed to get container status \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": rpc error: code = NotFound desc = could not find container \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": container with ID starting with 3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.734783 4888 scope.go:117] "RemoveContainer" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.735007 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": container with ID starting with 3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15 not found: ID does not exist" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735031 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} err="failed to get container status \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": rpc error: code = NotFound desc = could not find container \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": container with ID 
starting with 3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735043 4888 scope.go:117] "RemoveContainer" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.735232 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": container with ID starting with ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d not found: ID does not exist" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735253 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} err="failed to get container status \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": rpc error: code = NotFound desc = could not find container \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": container with ID starting with ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735265 4888 scope.go:117] "RemoveContainer" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.735427 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": container with ID starting with 51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26 not found: ID does not exist" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735447 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} err="failed to get container status \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": rpc error: code = NotFound desc = could not find container \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": container with ID starting with 51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735458 4888 scope.go:117] "RemoveContainer" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.735648 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": container with ID starting with 366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585 not found: ID does not exist" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735676 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} err="failed to get container status 
\"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": rpc error: code = NotFound desc = could not find container \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": container with ID starting with 366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735695 4888 scope.go:117] "RemoveContainer" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.735886 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": container with ID starting with 17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81 not found: ID does not exist" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735907 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} err="failed to get container status \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": rpc error: code = NotFound desc = could not find container \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": container with ID starting with 17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.735922 4888 scope.go:117] "RemoveContainer" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: E1201 19:44:33.736107 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": container with ID starting with 5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9 not found: ID does not exist" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736126 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} err="failed to get container status \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": rpc error: code = NotFound desc = could not find container \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": container with ID starting with 5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736138 4888 scope.go:117] "RemoveContainer" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736355 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} err="failed to get container status \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: 
ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736383 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736570 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} err="failed to get container status \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": rpc error: code = NotFound desc = could not find container \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": container with ID starting with 71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736587 4888 scope.go:117] "RemoveContainer" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736772 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} err="failed to get container status \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": rpc error: code = NotFound desc = could not find container \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": container with ID starting with 402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.736809 4888 scope.go:117] "RemoveContainer" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737151 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} err="failed to get container status \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": rpc error: code = NotFound desc = could not find container \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": container with ID starting with 3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737170 4888 scope.go:117] "RemoveContainer" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737406 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} err="failed to get container status \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": rpc error: code = NotFound desc = could not find container \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": container with ID starting with 3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737435 4888 scope.go:117] "RemoveContainer" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737689 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} err="failed to get container status 
\"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": rpc error: code = NotFound desc = could not find container \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": container with ID starting with ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737711 4888 scope.go:117] "RemoveContainer" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737897 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} err="failed to get container status \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": rpc error: code = NotFound desc = could not find container \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": container with ID starting with 51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.737914 4888 scope.go:117] "RemoveContainer" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738094 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} err="failed to get container status \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": rpc error: code = NotFound desc = could not find container \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": container with ID starting with 366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738112 4888 scope.go:117] "RemoveContainer" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738337 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} err="failed to get container status \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": rpc error: code = NotFound desc = could not find container \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": container with ID starting with 17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738361 4888 scope.go:117] "RemoveContainer" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738526 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} err="failed to get container status \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": rpc error: code = NotFound desc = could not find container \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": container with ID starting with 5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738545 4888 scope.go:117] "RemoveContainer" 
containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738750 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} err="failed to get container status \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738775 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.738993 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} err="failed to get container status \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": rpc error: code = NotFound desc = could not find container \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": container with ID starting with 71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739013 4888 scope.go:117] "RemoveContainer" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739222 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} err="failed to get container status \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": rpc error: code = NotFound desc = could not find container \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": container with ID starting with 402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739242 4888 scope.go:117] "RemoveContainer" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739419 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} err="failed to get container status \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": rpc error: code = NotFound desc = could not find container \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": container with ID starting with 3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739439 4888 scope.go:117] "RemoveContainer" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739659 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} err="failed to get container status \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": rpc error: code = NotFound desc = could not find 
container \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": container with ID starting with 3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739676 4888 scope.go:117] "RemoveContainer" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739844 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} err="failed to get container status \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": rpc error: code = NotFound desc = could not find container \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": container with ID starting with ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.739859 4888 scope.go:117] "RemoveContainer" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740073 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} err="failed to get container status \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": rpc error: code = NotFound desc = could not find container \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": container with ID starting with 51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740103 4888 scope.go:117] "RemoveContainer" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740564 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} err="failed to get container status \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": rpc error: code = NotFound desc = could not find container \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": container with ID starting with 366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740587 4888 scope.go:117] "RemoveContainer" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740822 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} err="failed to get container status \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": rpc error: code = NotFound desc = could not find container \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": container with ID starting with 17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.740849 4888 scope.go:117] "RemoveContainer" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741083 4888 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} err="failed to get container status \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": rpc error: code = NotFound desc = could not find container \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": container with ID starting with 5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741104 4888 scope.go:117] "RemoveContainer" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741320 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} err="failed to get container status \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741339 4888 scope.go:117] "RemoveContainer" containerID="71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741578 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa"} err="failed to get container status \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": rpc error: code = NotFound desc = could not find container \"71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa\": container with ID starting with 71399e50ff93aeaf833b1acb19552e23d91700e2d331f58dbe387c925564a3aa not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741625 4888 scope.go:117] "RemoveContainer" containerID="402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741814 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710"} err="failed to get container status \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": rpc error: code = NotFound desc = could not find container \"402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710\": container with ID starting with 402c61bdd6e2a0ed0447f7e6d16defe07a61f372d2493d15c78191196087e710 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.741846 4888 scope.go:117] "RemoveContainer" containerID="3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742095 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1"} err="failed to get container status \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": rpc error: code = NotFound desc = could not find container \"3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1\": container with ID starting with 
3a0fb4252e34dd515e4ca82dcd4e6f2f06f8acfbff2be06cd915ff1cde1a84e1 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742118 4888 scope.go:117] "RemoveContainer" containerID="3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742319 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15"} err="failed to get container status \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": rpc error: code = NotFound desc = could not find container \"3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15\": container with ID starting with 3ec9b297e298280e4562812b6684ce17443264103d15361812b7c9d1e5035f15 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742341 4888 scope.go:117] "RemoveContainer" containerID="ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742501 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d"} err="failed to get container status \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": rpc error: code = NotFound desc = could not find container \"ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d\": container with ID starting with ee34bb894deab1f9fc3ab45a1b89babd21e736bbdb3ff517abcfe32625c24f9d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742533 4888 scope.go:117] "RemoveContainer" containerID="51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742681 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26"} err="failed to get container status \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": rpc error: code = NotFound desc = could not find container \"51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26\": container with ID starting with 51bfe84ca22b19a8d95e36f8b6b97ffd04cfb4738c16823e984582ddf409ac26 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742698 4888 scope.go:117] "RemoveContainer" containerID="366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742870 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585"} err="failed to get container status \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": rpc error: code = NotFound desc = could not find container \"366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585\": container with ID starting with 366e7a61972d0e6aa69dca6ebde892046406eb8184c56615cd795fca0d4bf585 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.742887 4888 scope.go:117] "RemoveContainer" containerID="17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.743055 4888 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81"} err="failed to get container status \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": rpc error: code = NotFound desc = could not find container \"17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81\": container with ID starting with 17df4512e7f423cc418cab94a8cdcb46fe1a5afe23898dabfbbc4e9e782c9a81 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.743136 4888 scope.go:117] "RemoveContainer" containerID="5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.743332 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9"} err="failed to get container status \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": rpc error: code = NotFound desc = could not find container \"5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9\": container with ID starting with 5c35aed291de78a9f5f97882cebc1989796129996e446a38619c5c13aa1189c9 not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.743350 4888 scope.go:117] "RemoveContainer" containerID="91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.743525 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d"} err="failed to get container status \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": rpc error: code = NotFound desc = could not find container \"91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d\": container with ID starting with 91c984e3bfa0a3846de452fbab84706560b640e5c2b0225db1308b9ebb014b5d not found: ID does not exist" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.799664 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.908135 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f4wj6"] Dec 01 19:44:33 crc kubenswrapper[4888]: I1201 19:44:33.917097 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f4wj6"] Dec 01 19:44:34 crc kubenswrapper[4888]: I1201 19:44:34.457851 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="578ef97f-2ce3-405a-9f4e-fcaa5f98df07" path="/var/lib/kubelet/pods/578ef97f-2ce3-405a-9f4e-fcaa5f98df07/volumes" Dec 01 19:44:34 crc kubenswrapper[4888]: I1201 19:44:34.606379 4888 generic.go:334] "Generic (PLEG): container finished" podID="1b2e0586-6957-4ede-b0cc-538dc8082425" containerID="f31190f0145d2e02292bc4c8b7e552dcc1d0d312718960e18444cb38ae29da69" exitCode=0 Dec 01 19:44:34 crc kubenswrapper[4888]: I1201 19:44:34.606429 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerDied","Data":"f31190f0145d2e02292bc4c8b7e552dcc1d0d312718960e18444cb38ae29da69"} Dec 01 19:44:34 crc kubenswrapper[4888]: I1201 19:44:34.606518 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"e316be1ab709a5ecd7bb98b04888b3cde8143d0cd644f48ff2f19bb4f7963c41"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615042 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"26ab09ab08235f8d3fa058e9a5e87dcc52196819b6016490f1e7082f7d610d38"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615402 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"99d535ed031ac593fe6a1192e9b3cb15f15f9be9345128169fd833db4f2ce124"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615415 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"efe0edd7106de071421cbd7f2aec4435e78d977938ade75bb8a8e1334a2b6b8c"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615427 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"4e603eccf1427794ad63cc618a8c21f2f6a7f58318c63fdb59cfecbafcb734b9"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615435 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"e0bfcfdde7e8e19f4a56b430c0eb4617fe9a793065baae7d0ac2b2623f6a1100"} Dec 01 19:44:35 crc kubenswrapper[4888]: I1201 19:44:35.615444 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"f3014bc0540083329625019c528d61d0e52bdf7b534c70f48c150674ed0a23cb"} Dec 01 19:44:38 crc kubenswrapper[4888]: I1201 19:44:38.635917 4888 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"ed01b72672e24ec2b158fba47cc60e50bd6b47de41d0fb06d4eb08e8a2efd796"} Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.649160 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" event={"ID":"1b2e0586-6957-4ede-b0cc-538dc8082425","Type":"ContainerStarted","Data":"892bb3786a3675003bd1d440b19b6c0a278f3f865827049c3d4d0979182edc47"} Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.650361 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.650524 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.650548 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.676936 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" podStartSLOduration=7.67692087 podStartE2EDuration="7.67692087s" podCreationTimestamp="2025-12-01 19:44:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:44:40.674134058 +0000 UTC m=+680.545163982" watchObservedRunningTime="2025-12-01 19:44:40.67692087 +0000 UTC m=+680.547950784" Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.677132 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:40 crc kubenswrapper[4888]: I1201 19:44:40.684401 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:44:47 crc kubenswrapper[4888]: I1201 19:44:47.450653 4888 scope.go:117] "RemoveContainer" containerID="05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730" Dec 01 19:44:47 crc kubenswrapper[4888]: E1201 19:44:47.451329 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hfpdh_openshift-multus(08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6)\"" pod="openshift-multus/multus-hfpdh" podUID="08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6" Dec 01 19:44:50 crc kubenswrapper[4888]: I1201 19:44:50.038088 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:44:50 crc kubenswrapper[4888]: I1201 19:44:50.038463 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:44:58 crc kubenswrapper[4888]: I1201 19:44:58.450772 4888 scope.go:117] "RemoveContainer" 
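
The startup-latency record above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, that is 19:44:40.67692087 - 19:44:33 = 7.67692087 s, and the zero-value firstStartedPulling/lastFinishedPulling timestamps (0001-01-01) suggest no image pull contributed to those seven seconds. The arithmetic, checked with a short snippet (timestamps copied from the record, sub-microsecond digits truncated):

from datetime import datetime, timezone

created = datetime(2025, 12, 1, 19, 44, 33, tzinfo=timezone.utc)            # podCreationTimestamp
observed = datetime(2025, 12, 1, 19, 44, 40, 676920, tzinfo=timezone.utc)   # watchObservedRunningTime
print((observed - created).total_seconds())  # 7.67692 -- matches podStartSLOduration=7.67692087
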
containerID="05742991d76b9a6ad942fdb435a365481efaba21b31d4384cebedc8464094730" Dec 01 19:44:58 crc kubenswrapper[4888]: I1201 19:44:58.750644 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/2.log" Dec 01 19:44:58 crc kubenswrapper[4888]: I1201 19:44:58.751436 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/1.log" Dec 01 19:44:58 crc kubenswrapper[4888]: I1201 19:44:58.751564 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hfpdh" event={"ID":"08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6","Type":"ContainerStarted","Data":"2bf78a50139f0223afbaf9811d2c66ee486821accef4368bdde4d21eb5af03b3"} Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.163850 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84"] Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.165020 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.168842 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.169123 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.181596 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84"] Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.218144 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.218285 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz2lb\" (UniqueName: \"kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.218332 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.319085 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.319522 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz2lb\" (UniqueName: \"kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.319623 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.319934 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.326748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.341534 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz2lb\" (UniqueName: \"kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb\") pod \"collect-profiles-29410305-tht84\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.489132 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.668912 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84"] Dec 01 19:45:00 crc kubenswrapper[4888]: I1201 19:45:00.762130 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" event={"ID":"d8a2afe5-2d97-407d-a226-40c0d3a61690","Type":"ContainerStarted","Data":"d6f6b2b2f43bf01d7fc9b69f77179525a3129ee5d52bde909f4653387acffcde"} Dec 01 19:45:01 crc kubenswrapper[4888]: I1201 19:45:01.772450 4888 generic.go:334] "Generic (PLEG): container finished" podID="d8a2afe5-2d97-407d-a226-40c0d3a61690" containerID="5308917796fe41ce9d4974d52f1ca3a51918251b7ec9f3e894bc2ff57911b1d5" exitCode=0 Dec 01 19:45:01 crc kubenswrapper[4888]: I1201 19:45:01.772582 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" event={"ID":"d8a2afe5-2d97-407d-a226-40c0d3a61690","Type":"ContainerDied","Data":"5308917796fe41ce9d4974d52f1ca3a51918251b7ec9f3e894bc2ff57911b1d5"} Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.033105 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.152584 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume\") pod \"d8a2afe5-2d97-407d-a226-40c0d3a61690\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.152691 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz2lb\" (UniqueName: \"kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb\") pod \"d8a2afe5-2d97-407d-a226-40c0d3a61690\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.152767 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume\") pod \"d8a2afe5-2d97-407d-a226-40c0d3a61690\" (UID: \"d8a2afe5-2d97-407d-a226-40c0d3a61690\") " Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.153399 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume" (OuterVolumeSpecName: "config-volume") pod "d8a2afe5-2d97-407d-a226-40c0d3a61690" (UID: "d8a2afe5-2d97-407d-a226-40c0d3a61690"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.158262 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb" (OuterVolumeSpecName: "kube-api-access-cz2lb") pod "d8a2afe5-2d97-407d-a226-40c0d3a61690" (UID: "d8a2afe5-2d97-407d-a226-40c0d3a61690"). InnerVolumeSpecName "kube-api-access-cz2lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.158485 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d8a2afe5-2d97-407d-a226-40c0d3a61690" (UID: "d8a2afe5-2d97-407d-a226-40c0d3a61690"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.254455 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d8a2afe5-2d97-407d-a226-40c0d3a61690-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.254720 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d8a2afe5-2d97-407d-a226-40c0d3a61690-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.254797 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz2lb\" (UniqueName: \"kubernetes.io/projected/d8a2afe5-2d97-407d-a226-40c0d3a61690-kube-api-access-cz2lb\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.785606 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" event={"ID":"d8a2afe5-2d97-407d-a226-40c0d3a61690","Type":"ContainerDied","Data":"d6f6b2b2f43bf01d7fc9b69f77179525a3129ee5d52bde909f4653387acffcde"} Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.785652 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6f6b2b2f43bf01d7fc9b69f77179525a3129ee5d52bde909f4653387acffcde" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.785701 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84" Dec 01 19:45:03 crc kubenswrapper[4888]: I1201 19:45:03.823764 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-84dzf" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.095511 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2"] Dec 01 19:45:10 crc kubenswrapper[4888]: E1201 19:45:10.096061 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8a2afe5-2d97-407d-a226-40c0d3a61690" containerName="collect-profiles" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.096075 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8a2afe5-2d97-407d-a226-40c0d3a61690" containerName="collect-profiles" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.096234 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8a2afe5-2d97-407d-a226-40c0d3a61690" containerName="collect-profiles" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.097077 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.098946 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.110525 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2"] Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.263120 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjt5l\" (UniqueName: \"kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.263222 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.263275 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.363997 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjt5l\" (UniqueName: \"kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.364307 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.364345 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.364833 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.364879 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.387473 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjt5l\" (UniqueName: \"kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.415029 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.789972 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2"] Dec 01 19:45:10 crc kubenswrapper[4888]: I1201 19:45:10.834115 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerStarted","Data":"e40266939b7584f2acc080a882fc16d77c99a8f009c726ad30e81ba7852aaf42"} Dec 01 19:45:11 crc kubenswrapper[4888]: I1201 19:45:11.839562 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerStarted","Data":"3a3b8eaa2b8aae0920687a647f59f37274d0e5e691802e9e8e4f419003c23f71"} Dec 01 19:45:12 crc kubenswrapper[4888]: I1201 19:45:12.848082 4888 generic.go:334] "Generic (PLEG): container finished" podID="3e000b33-9ace-4e1d-b43b-a884375df712" containerID="3a3b8eaa2b8aae0920687a647f59f37274d0e5e691802e9e8e4f419003c23f71" exitCode=0 Dec 01 19:45:12 crc kubenswrapper[4888]: I1201 19:45:12.848117 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerDied","Data":"3a3b8eaa2b8aae0920687a647f59f37274d0e5e691802e9e8e4f419003c23f71"} Dec 01 19:45:14 crc kubenswrapper[4888]: I1201 19:45:14.863975 4888 generic.go:334] "Generic (PLEG): container finished" podID="3e000b33-9ace-4e1d-b43b-a884375df712" containerID="61dca0276d709b606ef2104838207f87e0ad2ccb8b95efefdcd5fcb9ff602fb2" exitCode=0 Dec 01 19:45:14 crc kubenswrapper[4888]: I1201 19:45:14.864062 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" 
event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerDied","Data":"61dca0276d709b606ef2104838207f87e0ad2ccb8b95efefdcd5fcb9ff602fb2"} Dec 01 19:45:15 crc kubenswrapper[4888]: I1201 19:45:15.871044 4888 generic.go:334] "Generic (PLEG): container finished" podID="3e000b33-9ace-4e1d-b43b-a884375df712" containerID="fff62ffd7085fa6a76496921e2c3c2d459b8c30168ae335e2e8b0c33a13ebb0b" exitCode=0 Dec 01 19:45:15 crc kubenswrapper[4888]: I1201 19:45:15.871088 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerDied","Data":"fff62ffd7085fa6a76496921e2c3c2d459b8c30168ae335e2e8b0c33a13ebb0b"} Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.097166 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.240312 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util\") pod \"3e000b33-9ace-4e1d-b43b-a884375df712\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.240422 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle\") pod \"3e000b33-9ace-4e1d-b43b-a884375df712\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.240498 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjt5l\" (UniqueName: \"kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l\") pod \"3e000b33-9ace-4e1d-b43b-a884375df712\" (UID: \"3e000b33-9ace-4e1d-b43b-a884375df712\") " Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.241277 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle" (OuterVolumeSpecName: "bundle") pod "3e000b33-9ace-4e1d-b43b-a884375df712" (UID: "3e000b33-9ace-4e1d-b43b-a884375df712"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.247150 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l" (OuterVolumeSpecName: "kube-api-access-pjt5l") pod "3e000b33-9ace-4e1d-b43b-a884375df712" (UID: "3e000b33-9ace-4e1d-b43b-a884375df712"). InnerVolumeSpecName "kube-api-access-pjt5l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.341678 4888 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.341711 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjt5l\" (UniqueName: \"kubernetes.io/projected/3e000b33-9ace-4e1d-b43b-a884375df712-kube-api-access-pjt5l\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.432057 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util" (OuterVolumeSpecName: "util") pod "3e000b33-9ace-4e1d-b43b-a884375df712" (UID: "3e000b33-9ace-4e1d-b43b-a884375df712"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.443164 4888 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e000b33-9ace-4e1d-b43b-a884375df712-util\") on node \"crc\" DevicePath \"\"" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.890823 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" event={"ID":"3e000b33-9ace-4e1d-b43b-a884375df712","Type":"ContainerDied","Data":"e40266939b7584f2acc080a882fc16d77c99a8f009c726ad30e81ba7852aaf42"} Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.890890 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e40266939b7584f2acc080a882fc16d77c99a8f009c726ad30e81ba7852aaf42" Dec 01 19:45:17 crc kubenswrapper[4888]: I1201 19:45:17.890917 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2" Dec 01 19:45:20 crc kubenswrapper[4888]: I1201 19:45:20.037963 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:45:20 crc kubenswrapper[4888]: I1201 19:45:20.038322 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:45:20 crc kubenswrapper[4888]: I1201 19:45:20.693881 4888 scope.go:117] "RemoveContainer" containerID="a9b475d0a9abab8e6e931f29fc97275b400d3712541107576224e674868bea15" Dec 01 19:45:20 crc kubenswrapper[4888]: I1201 19:45:20.907148 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hfpdh_08a0b7d8-c2e5-4053-a3d7-b4bcdf604ea6/kube-multus/2.log" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.791763 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl"] Dec 01 19:45:21 crc kubenswrapper[4888]: E1201 19:45:21.792309 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="pull" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.792326 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="pull" Dec 01 19:45:21 crc kubenswrapper[4888]: E1201 19:45:21.792337 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="extract" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.792345 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="extract" Dec 01 19:45:21 crc kubenswrapper[4888]: E1201 19:45:21.792358 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="util" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.792365 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="util" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.792494 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e000b33-9ace-4e1d-b43b-a884375df712" containerName="extract" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.792907 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.795047 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-t5slr" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.798001 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.798475 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.803109 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl"] Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.898979 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzjpt\" (UniqueName: \"kubernetes.io/projected/d4346417-1916-4764-949c-3f2a628501e1-kube-api-access-mzjpt\") pod \"nmstate-operator-5b5b58f5c8-xjmvl\" (UID: \"d4346417-1916-4764-949c-3f2a628501e1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" Dec 01 19:45:21 crc kubenswrapper[4888]: I1201 19:45:21.999779 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzjpt\" (UniqueName: \"kubernetes.io/projected/d4346417-1916-4764-949c-3f2a628501e1-kube-api-access-mzjpt\") pod \"nmstate-operator-5b5b58f5c8-xjmvl\" (UID: \"d4346417-1916-4764-949c-3f2a628501e1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" Dec 01 19:45:22 crc kubenswrapper[4888]: I1201 19:45:22.017072 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzjpt\" (UniqueName: \"kubernetes.io/projected/d4346417-1916-4764-949c-3f2a628501e1-kube-api-access-mzjpt\") pod \"nmstate-operator-5b5b58f5c8-xjmvl\" (UID: \"d4346417-1916-4764-949c-3f2a628501e1\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" Dec 01 19:45:22 crc kubenswrapper[4888]: I1201 19:45:22.109209 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" Dec 01 19:45:22 crc kubenswrapper[4888]: I1201 19:45:22.273396 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl"] Dec 01 19:45:22 crc kubenswrapper[4888]: W1201 19:45:22.282413 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4346417_1916_4764_949c_3f2a628501e1.slice/crio-4999c2644c8c9f3912519acc3f877f5e51edf02c6556a54bed2256e6cd7323ea WatchSource:0}: Error finding container 4999c2644c8c9f3912519acc3f877f5e51edf02c6556a54bed2256e6cd7323ea: Status 404 returned error can't find the container with id 4999c2644c8c9f3912519acc3f877f5e51edf02c6556a54bed2256e6cd7323ea Dec 01 19:45:22 crc kubenswrapper[4888]: I1201 19:45:22.917151 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" event={"ID":"d4346417-1916-4764-949c-3f2a628501e1","Type":"ContainerStarted","Data":"4999c2644c8c9f3912519acc3f877f5e51edf02c6556a54bed2256e6cd7323ea"} Dec 01 19:45:24 crc kubenswrapper[4888]: I1201 19:45:24.936340 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" event={"ID":"d4346417-1916-4764-949c-3f2a628501e1","Type":"ContainerStarted","Data":"809c5a18345b9af8770aa0f3cc19bfbb5cbad3a14762317e0d2655a91d550b62"} Dec 01 19:45:24 crc kubenswrapper[4888]: I1201 19:45:24.958467 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-xjmvl" podStartSLOduration=1.936776303 podStartE2EDuration="3.958439995s" podCreationTimestamp="2025-12-01 19:45:21 +0000 UTC" firstStartedPulling="2025-12-01 19:45:22.283797444 +0000 UTC m=+722.154827368" lastFinishedPulling="2025-12-01 19:45:24.305461146 +0000 UTC m=+724.176491060" observedRunningTime="2025-12-01 19:45:24.954936662 +0000 UTC m=+724.825966576" watchObservedRunningTime="2025-12-01 19:45:24.958439995 +0000 UTC m=+724.829469949" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.326459 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.327725 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.331981 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-srbkw" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.343482 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.344441 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.346341 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.347986 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.358362 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-frhtc"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.359291 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.374308 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.468638 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.469450 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.471006 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-sxk99" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.471615 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.481322 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.504842 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g9pv\" (UniqueName: \"kubernetes.io/projected/398ab1db-126e-4ea0-b429-a7563f68c127-kube-api-access-4g9pv\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.504892 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/398ab1db-126e-4ea0-b429-a7563f68c127-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.504930 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-nmstate-lock\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.504953 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97v4r\" (UniqueName: \"kubernetes.io/projected/bc82e383-0b0c-4f71-84b0-8c1de3ba240a-kube-api-access-97v4r\") pod \"nmstate-metrics-7f946cbc9-qddf7\" (UID: \"bc82e383-0b0c-4f71-84b0-8c1de3ba240a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" Dec 01 
19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.505001 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-dbus-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.505042 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkpbc\" (UniqueName: \"kubernetes.io/projected/a9567735-6e3f-46d7-aa56-837398be488b-kube-api-access-jkpbc\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.505063 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-ovs-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.509875 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607630 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607680 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-dbus-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607706 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607734 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkpbc\" (UniqueName: \"kubernetes.io/projected/a9567735-6e3f-46d7-aa56-837398be488b-kube-api-access-jkpbc\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607751 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-ovs-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607766 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-85lbw\" (UniqueName: \"kubernetes.io/projected/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-kube-api-access-85lbw\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607807 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g9pv\" (UniqueName: \"kubernetes.io/projected/398ab1db-126e-4ea0-b429-a7563f68c127-kube-api-access-4g9pv\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607828 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/398ab1db-126e-4ea0-b429-a7563f68c127-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607850 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-nmstate-lock\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.607867 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97v4r\" (UniqueName: \"kubernetes.io/projected/bc82e383-0b0c-4f71-84b0-8c1de3ba240a-kube-api-access-97v4r\") pod \"nmstate-metrics-7f946cbc9-qddf7\" (UID: \"bc82e383-0b0c-4f71-84b0-8c1de3ba240a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.608038 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-dbus-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.608106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-nmstate-lock\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.608138 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a9567735-6e3f-46d7-aa56-837398be488b-ovs-socket\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.614758 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/398ab1db-126e-4ea0-b429-a7563f68c127-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.627984 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-97v4r\" (UniqueName: \"kubernetes.io/projected/bc82e383-0b0c-4f71-84b0-8c1de3ba240a-kube-api-access-97v4r\") pod \"nmstate-metrics-7f946cbc9-qddf7\" (UID: \"bc82e383-0b0c-4f71-84b0-8c1de3ba240a\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.636878 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g9pv\" (UniqueName: \"kubernetes.io/projected/398ab1db-126e-4ea0-b429-a7563f68c127-kube-api-access-4g9pv\") pod \"nmstate-webhook-5f6d4c5ccb-k2bjh\" (UID: \"398ab1db-126e-4ea0-b429-a7563f68c127\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.642378 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.645101 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkpbc\" (UniqueName: \"kubernetes.io/projected/a9567735-6e3f-46d7-aa56-837398be488b-kube-api-access-jkpbc\") pod \"nmstate-handler-frhtc\" (UID: \"a9567735-6e3f-46d7-aa56-837398be488b\") " pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.657475 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-84bd949964-tgqmf"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.658265 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.657487 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.672846 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-84bd949964-tgqmf"] Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.687463 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.708981 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.709037 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.709071 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85lbw\" (UniqueName: \"kubernetes.io/projected/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-kube-api-access-85lbw\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.710653 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.718049 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.724042 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85lbw\" (UniqueName: \"kubernetes.io/projected/e8b41a7b-e30b-40a3-9d94-89af1c9623b6-kube-api-access-85lbw\") pod \"nmstate-console-plugin-7fbb5f6569-jdzp6\" (UID: \"e8b41a7b-e30b-40a3-9d94-89af1c9623b6\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.782955 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810067 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-service-ca\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810780 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-trusted-ca-bundle\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810826 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810843 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpqj4\" (UniqueName: \"kubernetes.io/projected/3bed8879-c28a-460d-b35e-f1e9b96b2185-kube-api-access-cpqj4\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810861 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-oauth-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810881 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.810895 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-oauth-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911654 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-service-ca\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911717 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-trusted-ca-bundle\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911753 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911768 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpqj4\" (UniqueName: \"kubernetes.io/projected/3bed8879-c28a-460d-b35e-f1e9b96b2185-kube-api-access-cpqj4\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911787 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-oauth-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911816 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.911836 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-oauth-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.912872 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.912940 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-service-ca\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.914867 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-oauth-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.915691 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/3bed8879-c28a-460d-b35e-f1e9b96b2185-trusted-ca-bundle\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.920833 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-serving-cert\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.924357 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/3bed8879-c28a-460d-b35e-f1e9b96b2185-console-oauth-config\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.929023 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpqj4\" (UniqueName: \"kubernetes.io/projected/3bed8879-c28a-460d-b35e-f1e9b96b2185-kube-api-access-cpqj4\") pod \"console-84bd949964-tgqmf\" (UID: \"3bed8879-c28a-460d-b35e-f1e9b96b2185\") " pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:30 crc kubenswrapper[4888]: I1201 19:45:30.969477 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-frhtc" event={"ID":"a9567735-6e3f-46d7-aa56-837398be488b","Type":"ContainerStarted","Data":"ca9aaf64bfd8c1caa37a2e491b2bfdf48d862078827b978289c4f6b87c565e9f"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.001039 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6"] Dec 01 19:45:31 crc kubenswrapper[4888]: W1201 19:45:31.005733 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8b41a7b_e30b_40a3_9d94_89af1c9623b6.slice/crio-15228ce111ba41cf9134df6c789d11c303a4820354342761a9211d029f013757 WatchSource:0}: Error finding container 15228ce111ba41cf9134df6c789d11c303a4820354342761a9211d029f013757: Status 404 returned error can't find the container with id 15228ce111ba41cf9134df6c789d11c303a4820354342761a9211d029f013757 Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.027071 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.092353 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh"] Dec 01 19:45:31 crc kubenswrapper[4888]: W1201 19:45:31.101887 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod398ab1db_126e_4ea0_b429_a7563f68c127.slice/crio-c84834f6a88f0a057e02ca99c9d0224d5eae2ea80fad205e09e7e61c6bb7fd5a WatchSource:0}: Error finding container c84834f6a88f0a057e02ca99c9d0224d5eae2ea80fad205e09e7e61c6bb7fd5a: Status 404 returned error can't find the container with id c84834f6a88f0a057e02ca99c9d0224d5eae2ea80fad205e09e7e61c6bb7fd5a Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.146108 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7"] Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.403410 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-84bd949964-tgqmf"] Dec 01 19:45:31 crc kubenswrapper[4888]: W1201 19:45:31.405863 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3bed8879_c28a_460d_b35e_f1e9b96b2185.slice/crio-b85a1d6ea0840492bd1769b96ddb572aca5bd204e9a04c73572c228f634d74e0 WatchSource:0}: Error finding container b85a1d6ea0840492bd1769b96ddb572aca5bd204e9a04c73572c228f634d74e0: Status 404 returned error can't find the container with id b85a1d6ea0840492bd1769b96ddb572aca5bd204e9a04c73572c228f634d74e0 Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.976969 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-84bd949964-tgqmf" event={"ID":"3bed8879-c28a-460d-b35e-f1e9b96b2185","Type":"ContainerStarted","Data":"f66214a8acef1d259ad379c3d51fff88576ad5bac5acfbf0c773d277a96c5aa4"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.977309 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-84bd949964-tgqmf" event={"ID":"3bed8879-c28a-460d-b35e-f1e9b96b2185","Type":"ContainerStarted","Data":"b85a1d6ea0840492bd1769b96ddb572aca5bd204e9a04c73572c228f634d74e0"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.980722 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" event={"ID":"e8b41a7b-e30b-40a3-9d94-89af1c9623b6","Type":"ContainerStarted","Data":"15228ce111ba41cf9134df6c789d11c303a4820354342761a9211d029f013757"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.983970 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" event={"ID":"398ab1db-126e-4ea0-b429-a7563f68c127","Type":"ContainerStarted","Data":"c84834f6a88f0a057e02ca99c9d0224d5eae2ea80fad205e09e7e61c6bb7fd5a"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.985152 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" event={"ID":"bc82e383-0b0c-4f71-84b0-8c1de3ba240a","Type":"ContainerStarted","Data":"ba328f208e91408ce2f798f5dc7e8864f36c76c7bf0363e46aae6cb0ec19b0eb"} Dec 01 19:45:31 crc kubenswrapper[4888]: I1201 19:45:31.998394 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-84bd949964-tgqmf" podStartSLOduration=1.998377305 podStartE2EDuration="1.998377305s" 
podCreationTimestamp="2025-12-01 19:45:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:45:31.998018215 +0000 UTC m=+731.869048149" watchObservedRunningTime="2025-12-01 19:45:31.998377305 +0000 UTC m=+731.869407219" Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.025795 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-frhtc" event={"ID":"a9567735-6e3f-46d7-aa56-837398be488b","Type":"ContainerStarted","Data":"01ed8fb96e7dabce228b34eaf28c41e82e94770437a68243abb54f886687651d"} Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.026393 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.027412 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" event={"ID":"e8b41a7b-e30b-40a3-9d94-89af1c9623b6","Type":"ContainerStarted","Data":"c0c45c762a369d926a76e0e0e1605bb2d029c75e58517b310aad600b61a14484"} Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.029016 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" event={"ID":"398ab1db-126e-4ea0-b429-a7563f68c127","Type":"ContainerStarted","Data":"4e191085826e32f605f6ea4b73b4bfe8089ff4476160f66148ba38be6993ca89"} Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.029152 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.030892 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" event={"ID":"bc82e383-0b0c-4f71-84b0-8c1de3ba240a","Type":"ContainerStarted","Data":"bf3d55f0a05eb15397a9b56733a8340ed117a2162e104363912a4bfa30f06f12"} Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.041582 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-frhtc" podStartSLOduration=1.3127844419999999 podStartE2EDuration="4.041563676s" podCreationTimestamp="2025-12-01 19:45:30 +0000 UTC" firstStartedPulling="2025-12-01 19:45:30.734521387 +0000 UTC m=+730.605551301" lastFinishedPulling="2025-12-01 19:45:33.463300611 +0000 UTC m=+733.334330535" observedRunningTime="2025-12-01 19:45:34.041191915 +0000 UTC m=+733.912221829" watchObservedRunningTime="2025-12-01 19:45:34.041563676 +0000 UTC m=+733.912593590" Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.059674 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jdzp6" podStartSLOduration=1.617743857 podStartE2EDuration="4.059653915s" podCreationTimestamp="2025-12-01 19:45:30 +0000 UTC" firstStartedPulling="2025-12-01 19:45:31.007767535 +0000 UTC m=+730.878797449" lastFinishedPulling="2025-12-01 19:45:33.449677593 +0000 UTC m=+733.320707507" observedRunningTime="2025-12-01 19:45:34.055331259 +0000 UTC m=+733.926361183" watchObservedRunningTime="2025-12-01 19:45:34.059653915 +0000 UTC m=+733.930683839" Dec 01 19:45:34 crc kubenswrapper[4888]: I1201 19:45:34.103497 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" podStartSLOduration=1.746720549 podStartE2EDuration="4.103477207s" podCreationTimestamp="2025-12-01 
19:45:30 +0000 UTC" firstStartedPulling="2025-12-01 19:45:31.103838164 +0000 UTC m=+730.974868078" lastFinishedPulling="2025-12-01 19:45:33.460594782 +0000 UTC m=+733.331624736" observedRunningTime="2025-12-01 19:45:34.07246883 +0000 UTC m=+733.943498764" watchObservedRunningTime="2025-12-01 19:45:34.103477207 +0000 UTC m=+733.974507151" Dec 01 19:45:36 crc kubenswrapper[4888]: I1201 19:45:36.050056 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" event={"ID":"bc82e383-0b0c-4f71-84b0-8c1de3ba240a","Type":"ContainerStarted","Data":"13b4ec9f99538b03a55ce376220c051d253caae6fb6d1ad7ac31f0688edebb2f"} Dec 01 19:45:40 crc kubenswrapper[4888]: I1201 19:45:40.719128 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-frhtc" Dec 01 19:45:40 crc kubenswrapper[4888]: I1201 19:45:40.742713 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-qddf7" podStartSLOduration=6.05575497 podStartE2EDuration="10.74269238s" podCreationTimestamp="2025-12-01 19:45:30 +0000 UTC" firstStartedPulling="2025-12-01 19:45:31.152331241 +0000 UTC m=+731.023361155" lastFinishedPulling="2025-12-01 19:45:35.839268651 +0000 UTC m=+735.710298565" observedRunningTime="2025-12-01 19:45:36.074349193 +0000 UTC m=+735.945379137" watchObservedRunningTime="2025-12-01 19:45:40.74269238 +0000 UTC m=+740.613722314" Dec 01 19:45:41 crc kubenswrapper[4888]: I1201 19:45:41.027316 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:41 crc kubenswrapper[4888]: I1201 19:45:41.027360 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:41 crc kubenswrapper[4888]: I1201 19:45:41.032222 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:41 crc kubenswrapper[4888]: I1201 19:45:41.099679 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-84bd949964-tgqmf" Dec 01 19:45:41 crc kubenswrapper[4888]: I1201 19:45:41.155449 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:45:48 crc kubenswrapper[4888]: I1201 19:45:48.203073 4888 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.038304 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.038719 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.038774 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:45:50 
crc kubenswrapper[4888]: I1201 19:45:50.039440 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.039527 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d" gracePeriod=600 Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.164380 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d" exitCode=0 Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.164422 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d"} Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.164456 4888 scope.go:117] "RemoveContainer" containerID="60cddccf6437763438b75e72c847ba2beee28cd56ab20686e7a7632f1278ccc1" Dec 01 19:45:50 crc kubenswrapper[4888]: I1201 19:45:50.662612 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k2bjh" Dec 01 19:45:51 crc kubenswrapper[4888]: I1201 19:45:51.175096 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0"} Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.559768 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69"] Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.561219 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.563151 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.571446 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69"] Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.679783 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.679837 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lmc8\" (UniqueName: \"kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.679864 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.781226 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.781283 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lmc8\" (UniqueName: \"kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.781322 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.781736 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.781818 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.807015 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lmc8\" (UniqueName: \"kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:02 crc kubenswrapper[4888]: I1201 19:46:02.881011 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:03 crc kubenswrapper[4888]: I1201 19:46:03.098052 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69"] Dec 01 19:46:03 crc kubenswrapper[4888]: I1201 19:46:03.250777 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" event={"ID":"8920f51c-9abd-44ee-8418-0f8faa197a1e","Type":"ContainerStarted","Data":"cc0ca17b27ab3b8de1a2b2f5a893eb4baf2326ee4c0496dc448d621c5f6625f3"} Dec 01 19:46:04 crc kubenswrapper[4888]: I1201 19:46:04.259973 4888 generic.go:334] "Generic (PLEG): container finished" podID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerID="400b1229370fbf073cc506334fa5068626253c1f7a11a55ca63509992f1e832d" exitCode=0 Dec 01 19:46:04 crc kubenswrapper[4888]: I1201 19:46:04.260148 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" event={"ID":"8920f51c-9abd-44ee-8418-0f8faa197a1e","Type":"ContainerDied","Data":"400b1229370fbf073cc506334fa5068626253c1f7a11a55ca63509992f1e832d"} Dec 01 19:46:04 crc kubenswrapper[4888]: I1201 19:46:04.934551 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:04 crc kubenswrapper[4888]: I1201 19:46:04.935789 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.013683 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.018832 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.018870 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.018947 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ws7s\" (UniqueName: \"kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.120568 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ws7s\" (UniqueName: \"kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.120653 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.120676 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.121421 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.121512 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.156103 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9ws7s\" (UniqueName: \"kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s\") pod \"redhat-operators-mtjqp\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.271511 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:05 crc kubenswrapper[4888]: I1201 19:46:05.532303 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.218458 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-bt5fw" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerName="console" containerID="cri-o://e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486" gracePeriod=15 Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.273061 4888 generic.go:334] "Generic (PLEG): container finished" podID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerID="efd9be737c18f1e2d5c0b895685c6ff5daed72638e31fe716d71650411108294" exitCode=0 Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.274629 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" event={"ID":"8920f51c-9abd-44ee-8418-0f8faa197a1e","Type":"ContainerDied","Data":"efd9be737c18f1e2d5c0b895685c6ff5daed72638e31fe716d71650411108294"} Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.275536 4888 generic.go:334] "Generic (PLEG): container finished" podID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerID="233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427" exitCode=0 Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.275574 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerDied","Data":"233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427"} Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.275599 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerStarted","Data":"b10f83eced267798159a3df3b81b58b2fd55987481df5e5e0d0bea4224ff0c38"} Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.646037 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bt5fw_9454739e-41f7-48f2-a9ad-8194e0a18251/console/0.log" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.646375 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.740677 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.740739 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.740811 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.741657 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.741725 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.743247 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz8k6\" (UniqueName: \"kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.743367 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.743854 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca" (OuterVolumeSpecName: "service-ca") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.743403 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.743969 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config\") pod \"9454739e-41f7-48f2-a9ad-8194e0a18251\" (UID: \"9454739e-41f7-48f2-a9ad-8194e0a18251\") " Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.744286 4888 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.744303 4888 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.744312 4888 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.744460 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config" (OuterVolumeSpecName: "console-config") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.746838 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6" (OuterVolumeSpecName: "kube-api-access-bz8k6") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "kube-api-access-bz8k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.746876 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.747354 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "9454739e-41f7-48f2-a9ad-8194e0a18251" (UID: "9454739e-41f7-48f2-a9ad-8194e0a18251"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.845816 4888 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9454739e-41f7-48f2-a9ad-8194e0a18251-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.845868 4888 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.845889 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz8k6\" (UniqueName: \"kubernetes.io/projected/9454739e-41f7-48f2-a9ad-8194e0a18251-kube-api-access-bz8k6\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:06 crc kubenswrapper[4888]: I1201 19:46:06.845904 4888 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9454739e-41f7-48f2-a9ad-8194e0a18251-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282112 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-bt5fw_9454739e-41f7-48f2-a9ad-8194e0a18251/console/0.log" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282153 4888 generic.go:334] "Generic (PLEG): container finished" podID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerID="e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486" exitCode=2 Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282216 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bt5fw" event={"ID":"9454739e-41f7-48f2-a9ad-8194e0a18251","Type":"ContainerDied","Data":"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486"} Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282247 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bt5fw" event={"ID":"9454739e-41f7-48f2-a9ad-8194e0a18251","Type":"ContainerDied","Data":"78ff0a2027078e7fa0e124c54c499ae1ce20749d09981bec346c6f1aedfec53d"} Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282268 4888 scope.go:117] "RemoveContainer" containerID="e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.282381 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-bt5fw" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.287360 4888 generic.go:334] "Generic (PLEG): container finished" podID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerID="57e46c2d4de4dd7a754203e9bcb6bd0aa5d8dda5860fa106a7c77bd86b974584" exitCode=0 Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.287394 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" event={"ID":"8920f51c-9abd-44ee-8418-0f8faa197a1e","Type":"ContainerDied","Data":"57e46c2d4de4dd7a754203e9bcb6bd0aa5d8dda5860fa106a7c77bd86b974584"} Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.312905 4888 scope.go:117] "RemoveContainer" containerID="e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486" Dec 01 19:46:07 crc kubenswrapper[4888]: E1201 19:46:07.315856 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486\": container with ID starting with e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486 not found: ID does not exist" containerID="e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.315891 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486"} err="failed to get container status \"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486\": rpc error: code = NotFound desc = could not find container \"e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486\": container with ID starting with e480aea3b0079631e3ade32bd01c39337f84dfcf92544b7ff4159b1e5c316486 not found: ID does not exist" Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.325121 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:46:07 crc kubenswrapper[4888]: I1201 19:46:07.330519 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-bt5fw"] Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.295471 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerStarted","Data":"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2"} Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.459416 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" path="/var/lib/kubelet/pods/9454739e-41f7-48f2-a9ad-8194e0a18251/volumes" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.547594 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.669803 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle\") pod \"8920f51c-9abd-44ee-8418-0f8faa197a1e\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.669987 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lmc8\" (UniqueName: \"kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8\") pod \"8920f51c-9abd-44ee-8418-0f8faa197a1e\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.670025 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util\") pod \"8920f51c-9abd-44ee-8418-0f8faa197a1e\" (UID: \"8920f51c-9abd-44ee-8418-0f8faa197a1e\") " Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.670785 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle" (OuterVolumeSpecName: "bundle") pod "8920f51c-9abd-44ee-8418-0f8faa197a1e" (UID: "8920f51c-9abd-44ee-8418-0f8faa197a1e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.675441 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8" (OuterVolumeSpecName: "kube-api-access-5lmc8") pod "8920f51c-9abd-44ee-8418-0f8faa197a1e" (UID: "8920f51c-9abd-44ee-8418-0f8faa197a1e"). InnerVolumeSpecName "kube-api-access-5lmc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.682339 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util" (OuterVolumeSpecName: "util") pod "8920f51c-9abd-44ee-8418-0f8faa197a1e" (UID: "8920f51c-9abd-44ee-8418-0f8faa197a1e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.771074 4888 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.771112 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lmc8\" (UniqueName: \"kubernetes.io/projected/8920f51c-9abd-44ee-8418-0f8faa197a1e-kube-api-access-5lmc8\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:08 crc kubenswrapper[4888]: I1201 19:46:08.771128 4888 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8920f51c-9abd-44ee-8418-0f8faa197a1e-util\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:09 crc kubenswrapper[4888]: I1201 19:46:09.302661 4888 generic.go:334] "Generic (PLEG): container finished" podID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerID="d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2" exitCode=0 Dec 01 19:46:09 crc kubenswrapper[4888]: I1201 19:46:09.302813 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerDied","Data":"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2"} Dec 01 19:46:09 crc kubenswrapper[4888]: I1201 19:46:09.311944 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" event={"ID":"8920f51c-9abd-44ee-8418-0f8faa197a1e","Type":"ContainerDied","Data":"cc0ca17b27ab3b8de1a2b2f5a893eb4baf2326ee4c0496dc448d621c5f6625f3"} Dec 01 19:46:09 crc kubenswrapper[4888]: I1201 19:46:09.311978 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69" Dec 01 19:46:09 crc kubenswrapper[4888]: I1201 19:46:09.311993 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc0ca17b27ab3b8de1a2b2f5a893eb4baf2326ee4c0496dc448d621c5f6625f3" Dec 01 19:46:10 crc kubenswrapper[4888]: I1201 19:46:10.321448 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerStarted","Data":"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687"} Dec 01 19:46:10 crc kubenswrapper[4888]: I1201 19:46:10.343271 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mtjqp" podStartSLOduration=2.436477894 podStartE2EDuration="6.343243676s" podCreationTimestamp="2025-12-01 19:46:04 +0000 UTC" firstStartedPulling="2025-12-01 19:46:06.27660613 +0000 UTC m=+766.147636044" lastFinishedPulling="2025-12-01 19:46:10.183371902 +0000 UTC m=+770.054401826" observedRunningTime="2025-12-01 19:46:10.337123997 +0000 UTC m=+770.208153911" watchObservedRunningTime="2025-12-01 19:46:10.343243676 +0000 UTC m=+770.214273630" Dec 01 19:46:15 crc kubenswrapper[4888]: I1201 19:46:15.272019 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:15 crc kubenswrapper[4888]: I1201 19:46:15.272363 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:15 crc kubenswrapper[4888]: I1201 19:46:15.311083 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:15 crc kubenswrapper[4888]: I1201 19:46:15.388361 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:16 crc kubenswrapper[4888]: I1201 19:46:16.717414 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:17 crc kubenswrapper[4888]: I1201 19:46:17.355887 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mtjqp" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="registry-server" containerID="cri-o://5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687" gracePeriod=2 Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.198043 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.291980 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content\") pod \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.292458 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ws7s\" (UniqueName: \"kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s\") pod \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.292652 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities\") pod \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\" (UID: \"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe\") " Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.295479 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities" (OuterVolumeSpecName: "utilities") pod "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" (UID: "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.304323 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s" (OuterVolumeSpecName: "kube-api-access-9ws7s") pod "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" (UID: "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe"). InnerVolumeSpecName "kube-api-access-9ws7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.376959 4888 generic.go:334] "Generic (PLEG): container finished" podID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerID="5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687" exitCode=0 Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.377001 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerDied","Data":"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687"} Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.377027 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mtjqp" event={"ID":"61e5aba2-8bd7-472d-8cee-cb13ceadf7fe","Type":"ContainerDied","Data":"b10f83eced267798159a3df3b81b58b2fd55987481df5e5e0d0bea4224ff0c38"} Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.377044 4888 scope.go:117] "RemoveContainer" containerID="5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.377157 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mtjqp" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.394410 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ws7s\" (UniqueName: \"kubernetes.io/projected/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-kube-api-access-9ws7s\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.394443 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.421456 4888 scope.go:117] "RemoveContainer" containerID="d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.430610 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" (UID: "61e5aba2-8bd7-472d-8cee-cb13ceadf7fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.449164 4888 scope.go:117] "RemoveContainer" containerID="233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.478870 4888 scope.go:117] "RemoveContainer" containerID="5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.481471 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687\": container with ID starting with 5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687 not found: ID does not exist" containerID="5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.481538 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687"} err="failed to get container status \"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687\": rpc error: code = NotFound desc = could not find container \"5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687\": container with ID starting with 5729cadd6df662df5f8bcb27c3b330b1e327f0aabd9ce771c604614b98a6e687 not found: ID does not exist" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.481578 4888 scope.go:117] "RemoveContainer" containerID="d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.482223 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2\": container with ID starting with d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2 not found: ID does not exist" containerID="d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.482286 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2"} err="failed to get 
container status \"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2\": rpc error: code = NotFound desc = could not find container \"d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2\": container with ID starting with d5a953e7253599b75898cde63c39000fb355b8050dec1b27b8a4fd7ae3c3cdc2 not found: ID does not exist" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.482332 4888 scope.go:117] "RemoveContainer" containerID="233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.482948 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427\": container with ID starting with 233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427 not found: ID does not exist" containerID="233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.483022 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427"} err="failed to get container status \"233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427\": rpc error: code = NotFound desc = could not find container \"233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427\": container with ID starting with 233abb05590fe7edbd44542d5108ba6a515d3b04097ff23fa489b2513ed01427 not found: ID does not exist" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.495825 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.699126 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.704634 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mtjqp"] Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741076 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c8597757-vv42m"] Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741334 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="extract" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741349 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="extract" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741368 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="registry-server" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741374 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="registry-server" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741386 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="extract-utilities" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741392 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" 
containerName="extract-utilities" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741399 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerName="console" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741405 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerName="console" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741414 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="pull" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741420 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="pull" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741428 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="util" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741435 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="util" Dec 01 19:46:18 crc kubenswrapper[4888]: E1201 19:46:18.741446 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="extract-content" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741453 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="extract-content" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741554 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" containerName="registry-server" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741566 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="8920f51c-9abd-44ee-8418-0f8faa197a1e" containerName="extract" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741573 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="9454739e-41f7-48f2-a9ad-8194e0a18251" containerName="console" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.741952 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.745866 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.746169 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-mvxtz" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.746249 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.746200 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.747557 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.798923 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-webhook-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.799033 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-apiservice-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.799070 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dsrs\" (UniqueName: \"kubernetes.io/projected/a9e96183-2604-4b4c-bc23-a48485783f33-kube-api-access-7dsrs\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.815913 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c8597757-vv42m"] Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.899851 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-apiservice-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.899909 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dsrs\" (UniqueName: \"kubernetes.io/projected/a9e96183-2604-4b4c-bc23-a48485783f33-kube-api-access-7dsrs\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.899951 
4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-webhook-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.920423 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-apiservice-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.920925 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a9e96183-2604-4b4c-bc23-a48485783f33-webhook-cert\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:18 crc kubenswrapper[4888]: I1201 19:46:18.931001 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dsrs\" (UniqueName: \"kubernetes.io/projected/a9e96183-2604-4b4c-bc23-a48485783f33-kube-api-access-7dsrs\") pod \"metallb-operator-controller-manager-86c8597757-vv42m\" (UID: \"a9e96183-2604-4b4c-bc23-a48485783f33\") " pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.058207 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.133648 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"] Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.134440 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.137271 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.137634 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-m89vf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.159326 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.163123 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"] Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.202729 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjp8b\" (UniqueName: \"kubernetes.io/projected/9a88f138-a2b0-4826-8bba-dd3b7942d88b-kube-api-access-gjp8b\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.202791 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9a88f138-a2b0-4826-8bba-dd3b7942d88b-apiservice-cert\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.202818 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9a88f138-a2b0-4826-8bba-dd3b7942d88b-webhook-cert\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.304387 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjp8b\" (UniqueName: \"kubernetes.io/projected/9a88f138-a2b0-4826-8bba-dd3b7942d88b-kube-api-access-gjp8b\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.304471 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9a88f138-a2b0-4826-8bba-dd3b7942d88b-apiservice-cert\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.304501 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9a88f138-a2b0-4826-8bba-dd3b7942d88b-webhook-cert\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 
Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.311603 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9a88f138-a2b0-4826-8bba-dd3b7942d88b-apiservice-cert\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"
Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.321833 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjp8b\" (UniqueName: \"kubernetes.io/projected/9a88f138-a2b0-4826-8bba-dd3b7942d88b-kube-api-access-gjp8b\") pod \"metallb-operator-webhook-server-75d75bc95b-g8xkf\" (UID: \"9a88f138-a2b0-4826-8bba-dd3b7942d88b\") " pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"
Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.503529 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"
Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.516033 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-86c8597757-vv42m"]
Dec 01 19:46:19 crc kubenswrapper[4888]: I1201 19:46:19.753950 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"]
Dec 01 19:46:19 crc kubenswrapper[4888]: W1201 19:46:19.768992 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a88f138_a2b0_4826_8bba_dd3b7942d88b.slice/crio-8125cac851610549050629d0367ebc799add8bc7e690016b806206c9d28a6848 WatchSource:0}: Error finding container 8125cac851610549050629d0367ebc799add8bc7e690016b806206c9d28a6848: Status 404 returned error can't find the container with id 8125cac851610549050629d0367ebc799add8bc7e690016b806206c9d28a6848
Dec 01 19:46:20 crc kubenswrapper[4888]: I1201 19:46:20.391870 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" event={"ID":"9a88f138-a2b0-4826-8bba-dd3b7942d88b","Type":"ContainerStarted","Data":"8125cac851610549050629d0367ebc799add8bc7e690016b806206c9d28a6848"}
Dec 01 19:46:20 crc kubenswrapper[4888]: I1201 19:46:20.393073 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" event={"ID":"a9e96183-2604-4b4c-bc23-a48485783f33","Type":"ContainerStarted","Data":"fad39bbc24e9fb0deedf417fcdc745b510e448e8e2a75554da1e37adb6ef96ea"}
Dec 01 19:46:20 crc kubenswrapper[4888]: I1201 19:46:20.464595 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61e5aba2-8bd7-472d-8cee-cb13ceadf7fe" path="/var/lib/kubelet/pods/61e5aba2-8bd7-472d-8cee-cb13ceadf7fe/volumes"
Dec 01 19:46:23 crc kubenswrapper[4888]: I1201 19:46:23.441052 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" event={"ID":"a9e96183-2604-4b4c-bc23-a48485783f33","Type":"ContainerStarted","Data":"8be6ac37bfb84cff70ce1196883eb6e3ca3bdbc6056cbf543758969436ebbec8"}
Dec 01 19:46:23 crc kubenswrapper[4888]: I1201 19:46:23.441782 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m"
Dec 01 19:46:23 crc kubenswrapper[4888]: I1201 19:46:23.463244 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m" podStartSLOduration=2.307954079 podStartE2EDuration="5.463222859s" podCreationTimestamp="2025-12-01 19:46:18 +0000 UTC" firstStartedPulling="2025-12-01 19:46:19.528397138 +0000 UTC m=+779.399427052" lastFinishedPulling="2025-12-01 19:46:22.683665918 +0000 UTC m=+782.554695832" observedRunningTime="2025-12-01 19:46:23.459047949 +0000 UTC m=+783.330077863" watchObservedRunningTime="2025-12-01 19:46:23.463222859 +0000 UTC m=+783.334252783"
Dec 01 19:46:24 crc kubenswrapper[4888]: I1201 19:46:24.447155 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" event={"ID":"9a88f138-a2b0-4826-8bba-dd3b7942d88b","Type":"ContainerStarted","Data":"0996f04c573a6655f427d1577ffe8e41fe24b0360128491eecdc4aa7c8292e64"}
Dec 01 19:46:24 crc kubenswrapper[4888]: I1201 19:46:24.462373 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf" podStartSLOduration=1.032080507 podStartE2EDuration="5.462360076s" podCreationTimestamp="2025-12-01 19:46:19 +0000 UTC" firstStartedPulling="2025-12-01 19:46:19.772641668 +0000 UTC m=+779.643671582" lastFinishedPulling="2025-12-01 19:46:24.202921247 +0000 UTC m=+784.073951151" observedRunningTime="2025-12-01 19:46:24.461317726 +0000 UTC m=+784.332347640" watchObservedRunningTime="2025-12-01 19:46:24.462360076 +0000 UTC m=+784.333389980"
Dec 01 19:46:25 crc kubenswrapper[4888]: I1201 19:46:25.452169 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"
Dec 01 19:46:39 crc kubenswrapper[4888]: I1201 19:46:39.508823 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-75d75bc95b-g8xkf"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.061063 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-86c8597757-vv42m"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.813623 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-wjv4g"]
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.817875 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"]
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.818831 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.820058 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.823601 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-wtpm6"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.824015 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.830412 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.832513 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.842008 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"]
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.845575 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-sockets\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.845667 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-conf\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.845712 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/82396ea9-c5dd-4464-87f8-972b933e048e-frr-startup\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.945092 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-9zffn"]
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.945927 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9zffn"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.946871 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.946942 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/82396ea9-c5dd-4464-87f8-972b933e048e-frr-startup\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.946976 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87tb6\" (UniqueName: \"kubernetes.io/projected/76b0bcdf-1744-4b10-8576-7bf114e2ec63-kube-api-access-87tb6\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947008 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-reloader\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947033 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-sockets\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947058 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkvtd\" (UniqueName: \"kubernetes.io/projected/82396ea9-c5dd-4464-87f8-972b933e048e-kube-api-access-lkvtd\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947109 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-metrics\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947505 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/76b0bcdf-1744-4b10-8576-7bf114e2ec63-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947747 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-conf\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.947875 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-sockets\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.948136 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-frr-conf\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.948265 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/82396ea9-c5dd-4464-87f8-972b933e048e-frr-startup\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.952292 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.952560 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.952703 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.952931 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-mqs57"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.962510 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-5s49p"]
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.963386 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-5s49p"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.965667 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 01 19:46:59 crc kubenswrapper[4888]: I1201 19:46:59.988370 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-5s49p"]
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048675 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjhm6\" (UniqueName: \"kubernetes.io/projected/5234563a-ff0f-42ed-b8da-24b76dc29ebc-kube-api-access-xjhm6\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048718 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22tvr\" (UniqueName: \"kubernetes.io/projected/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-kube-api-access-22tvr\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048742 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048762 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048784 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87tb6\" (UniqueName: \"kubernetes.io/projected/76b0bcdf-1744-4b10-8576-7bf114e2ec63-kube-api-access-87tb6\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048810 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-reloader\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048836 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metrics-certs\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.048855 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkvtd\" (UniqueName: \"kubernetes.io/projected/82396ea9-c5dd-4464-87f8-972b933e048e-kube-api-access-lkvtd\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g"
Dec 01 19:47:00 crc kubenswrapper[4888]: I1201
19:47:00.048887 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.048885 4888 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.049096 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs podName:82396ea9-c5dd-4464-87f8-972b933e048e nodeName:}" failed. No retries permitted until 2025-12-01 19:47:00.549071264 +0000 UTC m=+820.420101178 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs") pod "frr-k8s-wjv4g" (UID: "82396ea9-c5dd-4464-87f8-972b933e048e") : secret "frr-k8s-certs-secret" not found Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049148 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-cert\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049214 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metallb-excludel2\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049273 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-metrics\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049319 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/76b0bcdf-1744-4b10-8576-7bf114e2ec63-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049386 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-reloader\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.049657 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/82396ea9-c5dd-4464-87f8-972b933e048e-metrics\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.057062 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/76b0bcdf-1744-4b10-8576-7bf114e2ec63-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.068532 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkvtd\" (UniqueName: \"kubernetes.io/projected/82396ea9-c5dd-4464-87f8-972b933e048e-kube-api-access-lkvtd\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.076654 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87tb6\" (UniqueName: \"kubernetes.io/projected/76b0bcdf-1744-4b10-8576-7bf114e2ec63-kube-api-access-87tb6\") pod \"frr-k8s-webhook-server-7fcb986d4-wlhmh\" (UID: \"76b0bcdf-1744-4b10-8576-7bf114e2ec63\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.146515 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.149938 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjhm6\" (UniqueName: \"kubernetes.io/projected/5234563a-ff0f-42ed-b8da-24b76dc29ebc-kube-api-access-xjhm6\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.149974 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22tvr\" (UniqueName: \"kubernetes.io/projected/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-kube-api-access-22tvr\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.150004 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.150059 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metrics-certs\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.150090 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.150112 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-cert\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.150131 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metallb-excludel2\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.150267 4888 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.150331 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist podName:84659dfe-1cdd-43b7-bb53-8adbf22e4c20 nodeName:}" failed. No retries permitted until 2025-12-01 19:47:00.650315437 +0000 UTC m=+820.521345351 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist") pod "speaker-9zffn" (UID: "84659dfe-1cdd-43b7-bb53-8adbf22e4c20") : secret "metallb-memberlist" not found Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.150844 4888 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.150878 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs podName:5234563a-ff0f-42ed-b8da-24b76dc29ebc nodeName:}" failed. No retries permitted until 2025-12-01 19:47:00.650871333 +0000 UTC m=+820.521901237 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs") pod "controller-f8648f98b-5s49p" (UID: "5234563a-ff0f-42ed-b8da-24b76dc29ebc") : secret "controller-certs-secret" not found Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.151204 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metallb-excludel2\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.154575 4888 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.155960 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-metrics-certs\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.165476 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-cert\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.171579 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22tvr\" (UniqueName: \"kubernetes.io/projected/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-kube-api-access-22tvr\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc 
kubenswrapper[4888]: I1201 19:47:00.175252 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjhm6\" (UniqueName: \"kubernetes.io/projected/5234563a-ff0f-42ed-b8da-24b76dc29ebc-kube-api-access-xjhm6\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.410452 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh"] Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.556069 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.559618 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/82396ea9-c5dd-4464-87f8-972b933e048e-metrics-certs\") pod \"frr-k8s-wjv4g\" (UID: \"82396ea9-c5dd-4464-87f8-972b933e048e\") " pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.657059 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.657169 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.657410 4888 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 19:47:00 crc kubenswrapper[4888]: E1201 19:47:00.658842 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist podName:84659dfe-1cdd-43b7-bb53-8adbf22e4c20 nodeName:}" failed. No retries permitted until 2025-12-01 19:47:01.65753043 +0000 UTC m=+821.528560374 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist") pod "speaker-9zffn" (UID: "84659dfe-1cdd-43b7-bb53-8adbf22e4c20") : secret "metallb-memberlist" not found Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.661716 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5234563a-ff0f-42ed-b8da-24b76dc29ebc-metrics-certs\") pod \"controller-f8648f98b-5s49p\" (UID: \"5234563a-ff0f-42ed-b8da-24b76dc29ebc\") " pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.669624 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" event={"ID":"76b0bcdf-1744-4b10-8576-7bf114e2ec63","Type":"ContainerStarted","Data":"9b2d65dfd8c5bec6a3b3f224f1306e60007f1141cac374d826787ed3669f89a2"} Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.755914 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:00 crc kubenswrapper[4888]: I1201 19:47:00.875174 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.265388 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-5s49p"] Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.669136 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.677496 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5s49p" event={"ID":"5234563a-ff0f-42ed-b8da-24b76dc29ebc","Type":"ContainerStarted","Data":"f64ff2c235ff386652e7f9965d42eb1281ad3b73d05055bc2a576109cdf005bc"} Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.677553 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5s49p" event={"ID":"5234563a-ff0f-42ed-b8da-24b76dc29ebc","Type":"ContainerStarted","Data":"9742c96cb9b80f06d9f36f44e78c320900f80d159642e6a789f5201326dfb605"} Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.677565 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5s49p" event={"ID":"5234563a-ff0f-42ed-b8da-24b76dc29ebc","Type":"ContainerStarted","Data":"28c9e27c499e858b32f24dab5af31f65f989803f501aadfad86fd7983aa15ff3"} Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.677593 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.678374 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/84659dfe-1cdd-43b7-bb53-8adbf22e4c20-memberlist\") pod \"speaker-9zffn\" (UID: \"84659dfe-1cdd-43b7-bb53-8adbf22e4c20\") " pod="metallb-system/speaker-9zffn" Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.679581 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" 
event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"d3584f7b59e57b5aa497901589551c0e178f7862e91cd7baccaa96c9f6bbfacc"} Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.697225 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-5s49p" podStartSLOduration=2.697209618 podStartE2EDuration="2.697209618s" podCreationTimestamp="2025-12-01 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:47:01.693507631 +0000 UTC m=+821.564537575" watchObservedRunningTime="2025-12-01 19:47:01.697209618 +0000 UTC m=+821.568239522" Dec 01 19:47:01 crc kubenswrapper[4888]: I1201 19:47:01.761989 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-9zffn" Dec 01 19:47:01 crc kubenswrapper[4888]: W1201 19:47:01.783119 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84659dfe_1cdd_43b7_bb53_8adbf22e4c20.slice/crio-52e9b15af49dbd931ee4b28c37ae7eb63f4ebc6ac82bb74caa3233f6dd27c5f7 WatchSource:0}: Error finding container 52e9b15af49dbd931ee4b28c37ae7eb63f4ebc6ac82bb74caa3233f6dd27c5f7: Status 404 returned error can't find the container with id 52e9b15af49dbd931ee4b28c37ae7eb63f4ebc6ac82bb74caa3233f6dd27c5f7 Dec 01 19:47:02 crc kubenswrapper[4888]: I1201 19:47:02.693686 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9zffn" event={"ID":"84659dfe-1cdd-43b7-bb53-8adbf22e4c20","Type":"ContainerStarted","Data":"068f15daf2400c5627e54546107a1153b40be9ce5639e10dbea1faa0539dbcbc"} Dec 01 19:47:02 crc kubenswrapper[4888]: I1201 19:47:02.693992 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9zffn" event={"ID":"84659dfe-1cdd-43b7-bb53-8adbf22e4c20","Type":"ContainerStarted","Data":"7300ad8c07266ef6053374d192f647430463588e39d3125bbe6e6b8aeefbbbea"} Dec 01 19:47:02 crc kubenswrapper[4888]: I1201 19:47:02.694004 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-9zffn" event={"ID":"84659dfe-1cdd-43b7-bb53-8adbf22e4c20","Type":"ContainerStarted","Data":"52e9b15af49dbd931ee4b28c37ae7eb63f4ebc6ac82bb74caa3233f6dd27c5f7"} Dec 01 19:47:02 crc kubenswrapper[4888]: I1201 19:47:02.694170 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-9zffn" Dec 01 19:47:02 crc kubenswrapper[4888]: I1201 19:47:02.712915 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-9zffn" podStartSLOduration=3.712901149 podStartE2EDuration="3.712901149s" podCreationTimestamp="2025-12-01 19:46:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:47:02.712759965 +0000 UTC m=+822.583789879" watchObservedRunningTime="2025-12-01 19:47:02.712901149 +0000 UTC m=+822.583931063" Dec 01 19:47:07 crc kubenswrapper[4888]: I1201 19:47:07.728968 4888 generic.go:334] "Generic (PLEG): container finished" podID="82396ea9-c5dd-4464-87f8-972b933e048e" containerID="cc9df6792e6e1664981678ab495769b5990e5a658ccc410ed731d918781ab835" exitCode=0 Dec 01 19:47:07 crc kubenswrapper[4888]: I1201 19:47:07.729483 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" 
event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerDied","Data":"cc9df6792e6e1664981678ab495769b5990e5a658ccc410ed731d918781ab835"} Dec 01 19:47:07 crc kubenswrapper[4888]: I1201 19:47:07.731200 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" event={"ID":"76b0bcdf-1744-4b10-8576-7bf114e2ec63","Type":"ContainerStarted","Data":"5d35791b77456dc4192ec14a82f11e126674ca1a27d7a8a5764978a2aa2fc8e4"} Dec 01 19:47:07 crc kubenswrapper[4888]: I1201 19:47:07.731345 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:07 crc kubenswrapper[4888]: I1201 19:47:07.786338 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" podStartSLOduration=1.8989182900000001 podStartE2EDuration="8.786313138s" podCreationTimestamp="2025-12-01 19:46:59 +0000 UTC" firstStartedPulling="2025-12-01 19:47:00.426733612 +0000 UTC m=+820.297763526" lastFinishedPulling="2025-12-01 19:47:07.31412846 +0000 UTC m=+827.185158374" observedRunningTime="2025-12-01 19:47:07.784309641 +0000 UTC m=+827.655339625" watchObservedRunningTime="2025-12-01 19:47:07.786313138 +0000 UTC m=+827.657343092" Dec 01 19:47:08 crc kubenswrapper[4888]: I1201 19:47:08.739071 4888 generic.go:334] "Generic (PLEG): container finished" podID="82396ea9-c5dd-4464-87f8-972b933e048e" containerID="c1a702f80d473ab21deb9d8827162ee871dd7afea0cc7ec19be2428d07a35c5f" exitCode=0 Dec 01 19:47:08 crc kubenswrapper[4888]: I1201 19:47:08.739158 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerDied","Data":"c1a702f80d473ab21deb9d8827162ee871dd7afea0cc7ec19be2428d07a35c5f"} Dec 01 19:47:09 crc kubenswrapper[4888]: I1201 19:47:09.749571 4888 generic.go:334] "Generic (PLEG): container finished" podID="82396ea9-c5dd-4464-87f8-972b933e048e" containerID="78f651d5c6168be9afe45b366e190edd33683db3398d7bf230b7cff01d72d02f" exitCode=0 Dec 01 19:47:09 crc kubenswrapper[4888]: I1201 19:47:09.749906 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerDied","Data":"78f651d5c6168be9afe45b366e190edd33683db3398d7bf230b7cff01d72d02f"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762484 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"5d30f8e3efe0aa0956e11a5fd300795fd0429fe1f75680041761ac2ec9208f59"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762798 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762810 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"76d39ab8a114d2d57cc0bf9712403fc8cb7c3664cd74056ca22405b6b95a4f83"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762820 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"a465883f38302ec33e3a30e6b700943ba2f79f0ef844c319084ce6e506807c40"} Dec 01 19:47:10 crc kubenswrapper[4888]: 
I1201 19:47:10.762828 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"338d387b6495f1d32aed750e6ae443a04c6e73fcd1b27fdd87784c64484d69f7"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762836 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"cd099c2e1260c126e895ffb4c77c8dab2b70b593261d80d9faaccd76346f64a3"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.762844 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-wjv4g" event={"ID":"82396ea9-c5dd-4464-87f8-972b933e048e","Type":"ContainerStarted","Data":"8cabe37fc44cb2b118deca7bed9dec0aa4a890518604f83c3b29af67c4efb0f8"} Dec 01 19:47:10 crc kubenswrapper[4888]: I1201 19:47:10.787759 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-wjv4g" podStartSLOduration=5.3543847620000005 podStartE2EDuration="11.787740862s" podCreationTimestamp="2025-12-01 19:46:59 +0000 UTC" firstStartedPulling="2025-12-01 19:47:00.871948237 +0000 UTC m=+820.742978151" lastFinishedPulling="2025-12-01 19:47:07.305304337 +0000 UTC m=+827.176334251" observedRunningTime="2025-12-01 19:47:10.78488299 +0000 UTC m=+830.655912904" watchObservedRunningTime="2025-12-01 19:47:10.787740862 +0000 UTC m=+830.658770776" Dec 01 19:47:11 crc kubenswrapper[4888]: I1201 19:47:11.767761 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-9zffn" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.415508 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.416932 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.420272 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-hgvxk" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.420429 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.420713 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.460915 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr6lj\" (UniqueName: \"kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj\") pod \"openstack-operator-index-z4mmh\" (UID: \"c5e8b40e-75d0-4095-b13a-dc613ea97ddb\") " pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.461153 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.561627 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr6lj\" (UniqueName: \"kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj\") pod \"openstack-operator-index-z4mmh\" (UID: \"c5e8b40e-75d0-4095-b13a-dc613ea97ddb\") " pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.585872 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr6lj\" (UniqueName: \"kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj\") pod \"openstack-operator-index-z4mmh\" (UID: \"c5e8b40e-75d0-4095-b13a-dc613ea97ddb\") " pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:14 crc kubenswrapper[4888]: I1201 19:47:14.743237 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:15 crc kubenswrapper[4888]: I1201 19:47:15.129546 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:15 crc kubenswrapper[4888]: W1201 19:47:15.135168 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5e8b40e_75d0_4095_b13a_dc613ea97ddb.slice/crio-6ec9ef4bb173e5a4d024fc79f2618bda19b8f28bb54996a9b09c746e18e3fb2b WatchSource:0}: Error finding container 6ec9ef4bb173e5a4d024fc79f2618bda19b8f28bb54996a9b09c746e18e3fb2b: Status 404 returned error can't find the container with id 6ec9ef4bb173e5a4d024fc79f2618bda19b8f28bb54996a9b09c746e18e3fb2b Dec 01 19:47:15 crc kubenswrapper[4888]: I1201 19:47:15.757306 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:15 crc kubenswrapper[4888]: I1201 19:47:15.801713 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:15 crc kubenswrapper[4888]: I1201 19:47:15.801758 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4mmh" event={"ID":"c5e8b40e-75d0-4095-b13a-dc613ea97ddb","Type":"ContainerStarted","Data":"6ec9ef4bb173e5a4d024fc79f2618bda19b8f28bb54996a9b09c746e18e3fb2b"} Dec 01 19:47:17 crc kubenswrapper[4888]: I1201 19:47:17.794749 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:17 crc kubenswrapper[4888]: I1201 19:47:17.815771 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4mmh" event={"ID":"c5e8b40e-75d0-4095-b13a-dc613ea97ddb","Type":"ContainerStarted","Data":"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a"} Dec 01 19:47:17 crc kubenswrapper[4888]: I1201 19:47:17.834692 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-z4mmh" podStartSLOduration=1.831237623 podStartE2EDuration="3.834670523s" podCreationTimestamp="2025-12-01 19:47:14 +0000 UTC" firstStartedPulling="2025-12-01 19:47:15.137109032 +0000 UTC m=+835.008138946" lastFinishedPulling="2025-12-01 19:47:17.140541932 +0000 UTC m=+837.011571846" observedRunningTime="2025-12-01 19:47:17.834241631 +0000 UTC m=+837.705271545" watchObservedRunningTime="2025-12-01 19:47:17.834670523 +0000 UTC m=+837.705700437" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.403797 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mwxl6"] Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.405058 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.421589 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mwxl6"] Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.525363 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/805e9225-e4df-4c8c-b543-29bec3f42292-kube-api-access-x29qh\") pod \"openstack-operator-index-mwxl6\" (UID: \"805e9225-e4df-4c8c-b543-29bec3f42292\") " pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.627564 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/805e9225-e4df-4c8c-b543-29bec3f42292-kube-api-access-x29qh\") pod \"openstack-operator-index-mwxl6\" (UID: \"805e9225-e4df-4c8c-b543-29bec3f42292\") " pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.650315 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x29qh\" (UniqueName: \"kubernetes.io/projected/805e9225-e4df-4c8c-b543-29bec3f42292-kube-api-access-x29qh\") pod \"openstack-operator-index-mwxl6\" (UID: \"805e9225-e4df-4c8c-b543-29bec3f42292\") " pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.719958 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:18 crc kubenswrapper[4888]: I1201 19:47:18.833222 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-z4mmh" podUID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" containerName="registry-server" containerID="cri-o://ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a" gracePeriod=2 Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.157669 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mwxl6"] Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.234037 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.334747 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr6lj\" (UniqueName: \"kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj\") pod \"c5e8b40e-75d0-4095-b13a-dc613ea97ddb\" (UID: \"c5e8b40e-75d0-4095-b13a-dc613ea97ddb\") " Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.342953 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj" (OuterVolumeSpecName: "kube-api-access-rr6lj") pod "c5e8b40e-75d0-4095-b13a-dc613ea97ddb" (UID: "c5e8b40e-75d0-4095-b13a-dc613ea97ddb"). InnerVolumeSpecName "kube-api-access-rr6lj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.435858 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr6lj\" (UniqueName: \"kubernetes.io/projected/c5e8b40e-75d0-4095-b13a-dc613ea97ddb-kube-api-access-rr6lj\") on node \"crc\" DevicePath \"\"" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.841545 4888 generic.go:334] "Generic (PLEG): container finished" podID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" containerID="ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a" exitCode=0 Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.841603 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4mmh" event={"ID":"c5e8b40e-75d0-4095-b13a-dc613ea97ddb","Type":"ContainerDied","Data":"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a"} Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.841667 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z4mmh" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.841687 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4mmh" event={"ID":"c5e8b40e-75d0-4095-b13a-dc613ea97ddb","Type":"ContainerDied","Data":"6ec9ef4bb173e5a4d024fc79f2618bda19b8f28bb54996a9b09c746e18e3fb2b"} Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.841712 4888 scope.go:117] "RemoveContainer" containerID="ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.843518 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mwxl6" event={"ID":"805e9225-e4df-4c8c-b543-29bec3f42292","Type":"ContainerStarted","Data":"4b3b55e4a020a098f61da0510bc1bd95be931c2a06b66e9a1e43ebda401d4c92"} Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.843543 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mwxl6" event={"ID":"805e9225-e4df-4c8c-b543-29bec3f42292","Type":"ContainerStarted","Data":"5556e8828ff2d65bdecf09582acae5547092eae744a3664b01ccb0b731606002"} Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.857073 4888 scope.go:117] "RemoveContainer" containerID="ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a" Dec 01 19:47:19 crc kubenswrapper[4888]: E1201 19:47:19.857671 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a\": container with ID starting with ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a not found: ID does not exist" containerID="ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.857732 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a"} err="failed to get container status \"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a\": rpc error: code = NotFound desc = could not find container \"ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a\": container with ID starting with ab5d5eee87bfbc1fd692a8de4630a77eff823d187e9bdbabd82bb327b4cf2a4a not found: ID does not exist" Dec 01 19:47:19 crc kubenswrapper[4888]: 
I1201 19:47:19.869925 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mwxl6" podStartSLOduration=1.823719721 podStartE2EDuration="1.869896195s" podCreationTimestamp="2025-12-01 19:47:18 +0000 UTC" firstStartedPulling="2025-12-01 19:47:19.17667604 +0000 UTC m=+839.047705954" lastFinishedPulling="2025-12-01 19:47:19.222852514 +0000 UTC m=+839.093882428" observedRunningTime="2025-12-01 19:47:19.868825494 +0000 UTC m=+839.739855408" watchObservedRunningTime="2025-12-01 19:47:19.869896195 +0000 UTC m=+839.740926119" Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.890029 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:19 crc kubenswrapper[4888]: I1201 19:47:19.893986 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-z4mmh"] Dec 01 19:47:20 crc kubenswrapper[4888]: I1201 19:47:20.153838 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-wlhmh" Dec 01 19:47:20 crc kubenswrapper[4888]: I1201 19:47:20.464986 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" path="/var/lib/kubelet/pods/c5e8b40e-75d0-4095-b13a-dc613ea97ddb/volumes" Dec 01 19:47:20 crc kubenswrapper[4888]: I1201 19:47:20.760718 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-wjv4g" Dec 01 19:47:20 crc kubenswrapper[4888]: I1201 19:47:20.879503 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-5s49p" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.607386 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"] Dec 01 19:47:25 crc kubenswrapper[4888]: E1201 19:47:25.608621 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" containerName="registry-server" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.608640 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" containerName="registry-server" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.608809 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e8b40e-75d0-4095-b13a-dc613ea97ddb" containerName="registry-server" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.610049 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.614221 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.614293 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnxds\" (UniqueName: \"kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.614355 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.615840 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"] Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.715480 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.715521 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnxds\" (UniqueName: \"kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.715554 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.715982 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.716002 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.734267 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-lnxds\" (UniqueName: \"kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds\") pod \"redhat-marketplace-kj7tk\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") " pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:25 crc kubenswrapper[4888]: I1201 19:47:25.944481 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kj7tk" Dec 01 19:47:26 crc kubenswrapper[4888]: I1201 19:47:26.165760 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"] Dec 01 19:47:26 crc kubenswrapper[4888]: I1201 19:47:26.884446 4888 generic.go:334] "Generic (PLEG): container finished" podID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerID="283de9f17f588f14dac9bab8557d0550b67ae97b97b844ec2c2ebbdb4045258e" exitCode=0 Dec 01 19:47:26 crc kubenswrapper[4888]: I1201 19:47:26.884484 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerDied","Data":"283de9f17f588f14dac9bab8557d0550b67ae97b97b844ec2c2ebbdb4045258e"} Dec 01 19:47:26 crc kubenswrapper[4888]: I1201 19:47:26.884506 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerStarted","Data":"f35bc6d1e4b4e71d437bc633f5ddab9d311a8bc204acae9f49e6cf947fa44dbb"} Dec 01 19:47:27 crc kubenswrapper[4888]: I1201 19:47:27.893446 4888 generic.go:334] "Generic (PLEG): container finished" podID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerID="0d534184395b71a65d977d794cbe91709855d59b42c9ba8026db8fd275a9ce94" exitCode=0 Dec 01 19:47:27 crc kubenswrapper[4888]: I1201 19:47:27.893538 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerDied","Data":"0d534184395b71a65d977d794cbe91709855d59b42c9ba8026db8fd275a9ce94"} Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.721161 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.721243 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.746262 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-mwxl6" Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.902542 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerStarted","Data":"100ed9b8272ee31daa4202725fd36af37c6e650482381b6f6bd3a4ddb38a7964"} Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.921099 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kj7tk" podStartSLOduration=2.451047113 podStartE2EDuration="3.921076971s" podCreationTimestamp="2025-12-01 19:47:25 +0000 UTC" firstStartedPulling="2025-12-01 19:47:26.886680432 +0000 UTC m=+846.757710346" lastFinishedPulling="2025-12-01 19:47:28.35671029 +0000 UTC m=+848.227740204" observedRunningTime="2025-12-01 19:47:28.916086178 +0000 UTC 
m=+848.787116092" watchObservedRunningTime="2025-12-01 19:47:28.921076971 +0000 UTC m=+848.792106885"
Dec 01 19:47:28 crc kubenswrapper[4888]: I1201 19:47:28.926175 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-mwxl6"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.036226 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"]
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.037570 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.040116 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-h8c9b"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.050820 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"]
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.181442 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.181507 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.181554 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6x46\" (UniqueName: \"kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.283407 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.283478 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.283526 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6x46\" (UniqueName: \"kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.283951 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.283949 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.307891 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6x46\" (UniqueName: \"kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46\") pod \"e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") " pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.355372 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.745541 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"]
Dec 01 19:47:31 crc kubenswrapper[4888]: W1201 19:47:31.753505 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48623756_fbaa_4a4f_867a_648ffc6becc9.slice/crio-93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007 WatchSource:0}: Error finding container 93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007: Status 404 returned error can't find the container with id 93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007
Dec 01 19:47:31 crc kubenswrapper[4888]: I1201 19:47:31.920846 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9" event={"ID":"48623756-fbaa-4a4f-867a-648ffc6becc9","Type":"ContainerStarted","Data":"93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007"}
Dec 01 19:47:32 crc kubenswrapper[4888]: I1201 19:47:32.929601 4888 generic.go:334] "Generic (PLEG): container finished" podID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerID="6acf925ba7b0a77d30ddab09cfd49c2b62e1ff3338e26d503f054a5a7d9ec526" exitCode=0
Dec 01 19:47:32 crc kubenswrapper[4888]: I1201 19:47:32.929647 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9" event={"ID":"48623756-fbaa-4a4f-867a-648ffc6becc9","Type":"ContainerDied","Data":"6acf925ba7b0a77d30ddab09cfd49c2b62e1ff3338e26d503f054a5a7d9ec526"}
Dec 01 19:47:33 crc kubenswrapper[4888]: I1201 19:47:33.938326 4888 generic.go:334] "Generic (PLEG): container finished" podID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerID="a2333fdb6d5cc3af5315ba0f6c6331084184a99439756ba021112e1d94fab25a" exitCode=0
Dec 01 19:47:33 crc kubenswrapper[4888]: I1201 19:47:33.938403 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9" event={"ID":"48623756-fbaa-4a4f-867a-648ffc6becc9","Type":"ContainerDied","Data":"a2333fdb6d5cc3af5315ba0f6c6331084184a99439756ba021112e1d94fab25a"}
Dec 01 19:47:34 crc kubenswrapper[4888]: I1201 19:47:34.950270 4888 generic.go:334] "Generic (PLEG): container finished" podID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerID="b594feb9d7e7ac39859dd0ab7956c20a95d3c8e7b5b3df86b69efe192ffda31f" exitCode=0
Dec 01 19:47:34 crc kubenswrapper[4888]: I1201 19:47:34.950364 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9" event={"ID":"48623756-fbaa-4a4f-867a-648ffc6becc9","Type":"ContainerDied","Data":"b594feb9d7e7ac39859dd0ab7956c20a95d3c8e7b5b3df86b69efe192ffda31f"}
Dec 01 19:47:35 crc kubenswrapper[4888]: I1201 19:47:35.944964 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:35 crc kubenswrapper[4888]: I1201 19:47:35.945404 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.004523 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.115138 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.295002 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.491057 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6x46\" (UniqueName: \"kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46\") pod \"48623756-fbaa-4a4f-867a-648ffc6becc9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") "
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.491142 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util\") pod \"48623756-fbaa-4a4f-867a-648ffc6becc9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") "
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.491227 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle\") pod \"48623756-fbaa-4a4f-867a-648ffc6becc9\" (UID: \"48623756-fbaa-4a4f-867a-648ffc6becc9\") "
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.492100 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle" (OuterVolumeSpecName: "bundle") pod "48623756-fbaa-4a4f-867a-648ffc6becc9" (UID: "48623756-fbaa-4a4f-867a-648ffc6becc9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.502531 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46" (OuterVolumeSpecName: "kube-api-access-q6x46") pod "48623756-fbaa-4a4f-867a-648ffc6becc9" (UID: "48623756-fbaa-4a4f-867a-648ffc6becc9"). InnerVolumeSpecName "kube-api-access-q6x46". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.511200 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util" (OuterVolumeSpecName: "util") pod "48623756-fbaa-4a4f-867a-648ffc6becc9" (UID: "48623756-fbaa-4a4f-867a-648ffc6becc9"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.593135 4888 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.593258 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6x46\" (UniqueName: \"kubernetes.io/projected/48623756-fbaa-4a4f-867a-648ffc6becc9-kube-api-access-q6x46\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.593281 4888 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/48623756-fbaa-4a4f-867a-648ffc6becc9-util\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.977202 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9"
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.977742 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9" event={"ID":"48623756-fbaa-4a4f-867a-648ffc6becc9","Type":"ContainerDied","Data":"93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007"}
Dec 01 19:47:36 crc kubenswrapper[4888]: I1201 19:47:36.977781 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93560a809d0d4b801a1ed904d2df04b408414252e4f0860a651d3c6742938007"
Dec 01 19:47:38 crc kubenswrapper[4888]: I1201 19:47:38.396926 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"]
Dec 01 19:47:38 crc kubenswrapper[4888]: I1201 19:47:38.397571 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kj7tk" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="registry-server" containerID="cri-o://100ed9b8272ee31daa4202725fd36af37c6e650482381b6f6bd3a4ddb38a7964" gracePeriod=2
Dec 01 19:47:38 crc kubenswrapper[4888]: I1201 19:47:38.991156 4888 generic.go:334] "Generic (PLEG): container finished" podID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerID="100ed9b8272ee31daa4202725fd36af37c6e650482381b6f6bd3a4ddb38a7964" exitCode=0
Dec 01 19:47:38 crc kubenswrapper[4888]: I1201 19:47:38.991208 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerDied","Data":"100ed9b8272ee31daa4202725fd36af37c6e650482381b6f6bd3a4ddb38a7964"}
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.240785 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.432299 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content\") pod \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") "
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.432690 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities\") pod \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") "
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.432724 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnxds\" (UniqueName: \"kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds\") pod \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\" (UID: \"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8\") "
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.434544 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities" (OuterVolumeSpecName: "utilities") pod "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" (UID: "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.440578 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds" (OuterVolumeSpecName: "kube-api-access-lnxds") pod "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" (UID: "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8"). InnerVolumeSpecName "kube-api-access-lnxds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449006 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"]
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449289 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="util"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449308 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="util"
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449322 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="extract"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449330 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="extract"
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449337 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="pull"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449343 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="pull"
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449352 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="extract-content"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449358 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="extract-content"
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449370 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="registry-server"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449376 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="registry-server"
Dec 01 19:47:39 crc kubenswrapper[4888]: E1201 19:47:39.449388 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="extract-utilities"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449396 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="extract-utilities"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449508 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" containerName="registry-server"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449521 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="48623756-fbaa-4a4f-867a-648ffc6becc9" containerName="extract"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.449940 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.456176 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-fzxwg"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.457370 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" (UID: "1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.469889 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"]
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.534794 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.535073 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.535152 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnxds\" (UniqueName: \"kubernetes.io/projected/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8-kube-api-access-lnxds\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.636143 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q828m\" (UniqueName: \"kubernetes.io/projected/bf82583b-b2be-41bb-af62-24e74142855f-kube-api-access-q828m\") pod \"openstack-operator-controller-operator-d5b7696c6-7s4mq\" (UID: \"bf82583b-b2be-41bb-af62-24e74142855f\") " pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.737958 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q828m\" (UniqueName: \"kubernetes.io/projected/bf82583b-b2be-41bb-af62-24e74142855f-kube-api-access-q828m\") pod \"openstack-operator-controller-operator-d5b7696c6-7s4mq\" (UID: \"bf82583b-b2be-41bb-af62-24e74142855f\") " pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.754990 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q828m\" (UniqueName: \"kubernetes.io/projected/bf82583b-b2be-41bb-af62-24e74142855f-kube-api-access-q828m\") pod \"openstack-operator-controller-operator-d5b7696c6-7s4mq\" (UID: \"bf82583b-b2be-41bb-af62-24e74142855f\") " pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:39 crc kubenswrapper[4888]: I1201 19:47:39.768541 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.003798 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kj7tk" event={"ID":"1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8","Type":"ContainerDied","Data":"f35bc6d1e4b4e71d437bc633f5ddab9d311a8bc204acae9f49e6cf947fa44dbb"}
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.004168 4888 scope.go:117] "RemoveContainer" containerID="100ed9b8272ee31daa4202725fd36af37c6e650482381b6f6bd3a4ddb38a7964"
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.004400 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kj7tk"
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.043485 4888 scope.go:117] "RemoveContainer" containerID="0d534184395b71a65d977d794cbe91709855d59b42c9ba8026db8fd275a9ce94"
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.049797 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"]
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.066149 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kj7tk"]
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.067029 4888 scope.go:117] "RemoveContainer" containerID="283de9f17f588f14dac9bab8557d0550b67ae97b97b844ec2c2ebbdb4045258e"
Dec 01 19:47:40 crc kubenswrapper[4888]: E1201 19:47:40.076416 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d2ff9d6_9862_4d8e_801c_dc66d7a8e1c8.slice/crio-f35bc6d1e4b4e71d437bc633f5ddab9d311a8bc204acae9f49e6cf947fa44dbb\": RecentStats: unable to find data in memory cache]"
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.203141 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"]
Dec 01 19:47:40 crc kubenswrapper[4888]: I1201 19:47:40.460107 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8" path="/var/lib/kubelet/pods/1d2ff9d6-9862-4d8e-801c-dc66d7a8e1c8/volumes"
Dec 01 19:47:41 crc kubenswrapper[4888]: I1201 19:47:41.016098 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq" event={"ID":"bf82583b-b2be-41bb-af62-24e74142855f","Type":"ContainerStarted","Data":"4b1a2ea7e38fc0b17a86a6b4483c4b84c8025adfd93616a1e4fb5a3a5db7de60"}
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.006381 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.008301 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.029235 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.100178 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.100285 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.100334 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqvn5\" (UniqueName: \"kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.201588 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.201670 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.201722 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqvn5\" (UniqueName: \"kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.202472 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.202531 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.228121 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqvn5\" (UniqueName: \"kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5\") pod \"community-operators-crfll\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") " pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.336970 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:44 crc kubenswrapper[4888]: I1201 19:47:44.713513 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.052108 4888 generic.go:334] "Generic (PLEG): container finished" podID="06753826-d077-41b1-858c-de9554b7bd40" containerID="f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6" exitCode=0
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.052157 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerDied","Data":"f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6"}
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.053826 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerStarted","Data":"04e3548d23528a9b5d5ede09c4960cc88d75660c98d7934bc38d478f1c921e66"}
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.055401 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq" event={"ID":"bf82583b-b2be-41bb-af62-24e74142855f","Type":"ContainerStarted","Data":"8c665e0f34854fddbeb1dc6ffca9189b81bd35be3c3a6ee07f72d7379fd6a7d1"}
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.055560 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:45 crc kubenswrapper[4888]: I1201 19:47:45.103890 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq" podStartSLOduration=2.260940346 podStartE2EDuration="6.103869127s" podCreationTimestamp="2025-12-01 19:47:39 +0000 UTC" firstStartedPulling="2025-12-01 19:47:40.215528604 +0000 UTC m=+860.086558518" lastFinishedPulling="2025-12-01 19:47:44.058457385 +0000 UTC m=+863.929487299" observedRunningTime="2025-12-01 19:47:45.092881562 +0000 UTC m=+864.963911476" watchObservedRunningTime="2025-12-01 19:47:45.103869127 +0000 UTC m=+864.974899041"
Dec 01 19:47:46 crc kubenswrapper[4888]: I1201 19:47:46.068712 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerStarted","Data":"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"}
Dec 01 19:47:47 crc kubenswrapper[4888]: I1201 19:47:47.078207 4888 generic.go:334] "Generic (PLEG): container finished" podID="06753826-d077-41b1-858c-de9554b7bd40" containerID="3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26" exitCode=0
Dec 01 19:47:47 crc kubenswrapper[4888]: I1201 19:47:47.078250 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerDied","Data":"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"}
Dec 01 19:47:48 crc kubenswrapper[4888]: I1201 19:47:48.088428 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerStarted","Data":"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"}
Dec 01 19:47:48 crc kubenswrapper[4888]: I1201 19:47:48.116316 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-crfll" podStartSLOduration=2.624584105 podStartE2EDuration="5.116294456s" podCreationTimestamp="2025-12-01 19:47:43 +0000 UTC" firstStartedPulling="2025-12-01 19:47:45.05342743 +0000 UTC m=+864.924457344" lastFinishedPulling="2025-12-01 19:47:47.545137781 +0000 UTC m=+867.416167695" observedRunningTime="2025-12-01 19:47:48.109507082 +0000 UTC m=+867.980537016" watchObservedRunningTime="2025-12-01 19:47:48.116294456 +0000 UTC m=+867.987324380"
Dec 01 19:47:49 crc kubenswrapper[4888]: I1201 19:47:49.772517 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-d5b7696c6-7s4mq"
Dec 01 19:47:50 crc kubenswrapper[4888]: I1201 19:47:50.038164 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:47:50 crc kubenswrapper[4888]: I1201 19:47:50.038242 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:47:54 crc kubenswrapper[4888]: I1201 19:47:54.337449 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:54 crc kubenswrapper[4888]: I1201 19:47:54.337875 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:54 crc kubenswrapper[4888]: I1201 19:47:54.377064 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:55 crc kubenswrapper[4888]: I1201 19:47:55.169817 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:55 crc kubenswrapper[4888]: I1201 19:47:55.215457 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:57 crc kubenswrapper[4888]: I1201 19:47:57.138219 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-crfll" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="registry-server" containerID="cri-o://bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300" gracePeriod=2
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.613761 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.710923 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqvn5\" (UniqueName: \"kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5\") pod \"06753826-d077-41b1-858c-de9554b7bd40\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") "
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.710985 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities\") pod \"06753826-d077-41b1-858c-de9554b7bd40\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") "
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.711032 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content\") pod \"06753826-d077-41b1-858c-de9554b7bd40\" (UID: \"06753826-d077-41b1-858c-de9554b7bd40\") "
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.712239 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities" (OuterVolumeSpecName: "utilities") pod "06753826-d077-41b1-858c-de9554b7bd40" (UID: "06753826-d077-41b1-858c-de9554b7bd40"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.716865 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5" (OuterVolumeSpecName: "kube-api-access-sqvn5") pod "06753826-d077-41b1-858c-de9554b7bd40" (UID: "06753826-d077-41b1-858c-de9554b7bd40"). InnerVolumeSpecName "kube-api-access-sqvn5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.772049 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06753826-d077-41b1-858c-de9554b7bd40" (UID: "06753826-d077-41b1-858c-de9554b7bd40"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.813115 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.813165 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06753826-d077-41b1-858c-de9554b7bd40-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:58 crc kubenswrapper[4888]: I1201 19:47:58.813203 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqvn5\" (UniqueName: \"kubernetes.io/projected/06753826-d077-41b1-858c-de9554b7bd40-kube-api-access-sqvn5\") on node \"crc\" DevicePath \"\""
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.150516 4888 generic.go:334] "Generic (PLEG): container finished" podID="06753826-d077-41b1-858c-de9554b7bd40" containerID="bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300" exitCode=0
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.150572 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-crfll"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.150565 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerDied","Data":"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"}
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.150700 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crfll" event={"ID":"06753826-d077-41b1-858c-de9554b7bd40","Type":"ContainerDied","Data":"04e3548d23528a9b5d5ede09c4960cc88d75660c98d7934bc38d478f1c921e66"}
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.150725 4888 scope.go:117] "RemoveContainer" containerID="bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.164874 4888 scope.go:117] "RemoveContainer" containerID="3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.176789 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.182324 4888 scope.go:117] "RemoveContainer" containerID="f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.182750 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-crfll"]
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.204928 4888 scope.go:117] "RemoveContainer" containerID="bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"
Dec 01 19:47:59 crc kubenswrapper[4888]: E1201 19:47:59.205498 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300\": container with ID starting with bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300 not found: ID does not exist" containerID="bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.205554 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300"} err="failed to get container status \"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300\": rpc error: code = NotFound desc = could not find container \"bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300\": container with ID starting with bf9620c2a76593aff055bdfac02b4df1ccdfd8addedb9aeea94fdf386003e300 not found: ID does not exist"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.205587 4888 scope.go:117] "RemoveContainer" containerID="3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"
Dec 01 19:47:59 crc kubenswrapper[4888]: E1201 19:47:59.205907 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26\": container with ID starting with 3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26 not found: ID does not exist" containerID="3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.205944 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26"} err="failed to get container status \"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26\": rpc error: code = NotFound desc = could not find container \"3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26\": container with ID starting with 3b89f84a93c304739fb2d7277fb175a264f4b67319aded17de82b9d64f991b26 not found: ID does not exist"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.205968 4888 scope.go:117] "RemoveContainer" containerID="f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6"
Dec 01 19:47:59 crc kubenswrapper[4888]: E1201 19:47:59.206207 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6\": container with ID starting with f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6 not found: ID does not exist" containerID="f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6"
Dec 01 19:47:59 crc kubenswrapper[4888]: I1201 19:47:59.206240 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6"} err="failed to get container status \"f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6\": rpc error: code = NotFound desc = could not find container \"f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6\": container with ID starting with f57cb2d5b7a98cc656a20c4b86a3559c9ac461cfb9a01ac1cc1c66bc9832b2b6 not found: ID does not exist"
Dec 01 19:48:00 crc kubenswrapper[4888]: I1201 19:48:00.458988 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06753826-d077-41b1-858c-de9554b7bd40" path="/var/lib/kubelet/pods/06753826-d077-41b1-858c-de9554b7bd40/volumes"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.636105 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:05 crc kubenswrapper[4888]: E1201 19:48:05.637061 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="extract-utilities"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.637081 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="extract-utilities"
Dec 01 19:48:05 crc kubenswrapper[4888]: E1201 19:48:05.637094 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="registry-server"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.637104 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="registry-server"
Dec 01 19:48:05 crc kubenswrapper[4888]: E1201 19:48:05.637131 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="extract-content"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.637142 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="extract-content"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.637549 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="06753826-d077-41b1-858c-de9554b7bd40" containerName="registry-server"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.638981 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.658838 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.729011 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.729064 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.729084 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8qwk\" (UniqueName: \"kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.830458 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.830515 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.830539 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8qwk\" (UniqueName: \"kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.831234 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.831315 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.860080 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8qwk\" (UniqueName: \"kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk\") pod \"certified-operators-kg5r9\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") " pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:05 crc kubenswrapper[4888]: I1201 19:48:05.956612 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:06 crc kubenswrapper[4888]: I1201 19:48:06.438613 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:07 crc kubenswrapper[4888]: I1201 19:48:07.200085 4888 generic.go:334] "Generic (PLEG): container finished" podID="73e11a83-0e8b-45c0-b658-2e510650e935" containerID="27f70d29850344200e196fb064030b1f57bdcb8c35851cc3b172804c099d19e0" exitCode=0
Dec 01 19:48:07 crc kubenswrapper[4888]: I1201 19:48:07.200139 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerDied","Data":"27f70d29850344200e196fb064030b1f57bdcb8c35851cc3b172804c099d19e0"}
Dec 01 19:48:07 crc kubenswrapper[4888]: I1201 19:48:07.200171 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerStarted","Data":"9f19fdd29749b6a9bba68e91f99cc8a13d2b8e2462f4787d43a5339289839988"}
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.220713 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerStarted","Data":"eab9582f31ae30913c841b2bc2d531656d1d952df0f82f8eadbf54948e1660ce"}
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.691349 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.692311 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.694541 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-b86hn"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.696793 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.697862 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.700601 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-62zqk"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.736357 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.781912 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.803251 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.804928 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.810862 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-lz7nb"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.811466 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.820084 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.821566 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.826893 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-ttxwj"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.847051 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.853987 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.855724 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.863885 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-dnwlp"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.867391 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.868929 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.871372 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-g6njk"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.872743 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.879634 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdtxx\" (UniqueName: \"kubernetes.io/projected/a363b1d3-f519-41df-bdf8-e80b83edab4d-kube-api-access-jdtxx\") pod \"cinder-operator-controller-manager-859b6ccc6-f58dj\" (UID: \"a363b1d3-f519-41df-bdf8-e80b83edab4d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.879818 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95qv7\" (UniqueName: \"kubernetes.io/projected/1529e922-fd00-4f32-878a-d8a322a7b6b7-kube-api-access-95qv7\") pod \"barbican-operator-controller-manager-7d9dfd778-kjwft\" (UID: \"1529e922-fd00-4f32-878a-d8a322a7b6b7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.888282 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.914314 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.915371 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.916251 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.917128 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.917465 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.918771 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-kwcpj"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.924345 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9qggh"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.939204 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.962857 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.982749 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc"]
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.982760 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpc5b\" (UniqueName: \"kubernetes.io/projected/9f5355dc-205f-4dca-91cf-39209ca1a7b3-kube-api-access-jpc5b\") pod \"glance-operator-controller-manager-668d9c48b9-jszb6\" (UID: \"9f5355dc-205f-4dca-91cf-39209ca1a7b3\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983788 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdtxx\" (UniqueName: \"kubernetes.io/projected/a363b1d3-f519-41df-bdf8-e80b83edab4d-kube-api-access-jdtxx\") pod \"cinder-operator-controller-manager-859b6ccc6-f58dj\" (UID: \"a363b1d3-f519-41df-bdf8-e80b83edab4d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983843 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95qv7\" (UniqueName: \"kubernetes.io/projected/1529e922-fd00-4f32-878a-d8a322a7b6b7-kube-api-access-95qv7\") pod \"barbican-operator-controller-manager-7d9dfd778-kjwft\" (UID: \"1529e922-fd00-4f32-878a-d8a322a7b6b7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983877 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983883 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjglm\" (UniqueName: \"kubernetes.io/projected/1b3586db-3f49-4ee4-aed0-5e4d469fad92-kube-api-access-kjglm\") pod \"horizon-operator-controller-manager-68c6d99b8f-gqwgf\" (UID: \"1b3586db-3f49-4ee4-aed0-5e4d469fad92\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983912 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb9nj\" (UniqueName: \"kubernetes.io/projected/a0f5d22c-34c3-40c2-889c-b7900120919c-kube-api-access-lb9nj\") pod \"designate-operator-controller-manager-78b4bc895b-lbp4z\" (UID: \"a0f5d22c-34c3-40c2-889c-b7900120919c\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.983962 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf7hj\" (UniqueName: \"kubernetes.io/projected/3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2-kube-api-access-wf7hj\") pod \"heat-operator-controller-manager-5f64f6f8bb-8wh4g\" (UID: \"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"
Dec 01 19:48:08 crc kubenswrapper[4888]: I1201 19:48:08.989773 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-97hzh"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.011267 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.012580 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.015289 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdtxx\" (UniqueName: \"kubernetes.io/projected/a363b1d3-f519-41df-bdf8-e80b83edab4d-kube-api-access-jdtxx\") pod \"cinder-operator-controller-manager-859b6ccc6-f58dj\" (UID: \"a363b1d3-f519-41df-bdf8-e80b83edab4d\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.015571 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jxv22"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.019855 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.027927 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95qv7\" (UniqueName: \"kubernetes.io/projected/1529e922-fd00-4f32-878a-d8a322a7b6b7-kube-api-access-95qv7\") pod \"barbican-operator-controller-manager-7d9dfd778-kjwft\" (UID: \"1529e922-fd00-4f32-878a-d8a322a7b6b7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.037498 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.038619 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.039155 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.040270 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-pfkfv"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.050614 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.063130 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.080256 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.081759 4888 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085777 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljs89\" (UniqueName: \"kubernetes.io/projected/b7f34996-fe75-4c30-9e22-022f644f7c89-kube-api-access-ljs89\") pod \"keystone-operator-controller-manager-546d4bdf48-446zc\" (UID: \"b7f34996-fe75-4c30-9e22-022f644f7c89\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085833 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl7d8\" (UniqueName: \"kubernetes.io/projected/6cb92420-4e6c-4407-9a54-93f003d1c5e9-kube-api-access-hl7d8\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085867 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjglm\" (UniqueName: \"kubernetes.io/projected/1b3586db-3f49-4ee4-aed0-5e4d469fad92-kube-api-access-kjglm\") pod \"horizon-operator-controller-manager-68c6d99b8f-gqwgf\" (UID: \"1b3586db-3f49-4ee4-aed0-5e4d469fad92\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085887 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb9nj\" (UniqueName: \"kubernetes.io/projected/a0f5d22c-34c3-40c2-889c-b7900120919c-kube-api-access-lb9nj\") pod \"designate-operator-controller-manager-78b4bc895b-lbp4z\" (UID: \"a0f5d22c-34c3-40c2-889c-b7900120919c\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085920 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085937 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf7hj\" (UniqueName: \"kubernetes.io/projected/3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2-kube-api-access-wf7hj\") pod \"heat-operator-controller-manager-5f64f6f8bb-8wh4g\" (UID: \"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085963 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd57w\" (UniqueName: \"kubernetes.io/projected/da594cb2-bb6a-4028-a609-68385c474377-kube-api-access-hd57w\") pod \"ironic-operator-controller-manager-6c548fd776-5mfth\" (UID: \"da594cb2-bb6a-4028-a609-68385c474377\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.085996 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpc5b\" (UniqueName: 
\"kubernetes.io/projected/9f5355dc-205f-4dca-91cf-39209ca1a7b3-kube-api-access-jpc5b\") pod \"glance-operator-controller-manager-668d9c48b9-jszb6\" (UID: \"9f5355dc-205f-4dca-91cf-39209ca1a7b3\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.099460 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zcstw" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.119905 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.126597 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf7hj\" (UniqueName: \"kubernetes.io/projected/3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2-kube-api-access-wf7hj\") pod \"heat-operator-controller-manager-5f64f6f8bb-8wh4g\" (UID: \"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.139504 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.140695 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.146211 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpc5b\" (UniqueName: \"kubernetes.io/projected/9f5355dc-205f-4dca-91cf-39209ca1a7b3-kube-api-access-jpc5b\") pod \"glance-operator-controller-manager-668d9c48b9-jszb6\" (UID: \"9f5355dc-205f-4dca-91cf-39209ca1a7b3\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.146288 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-swg2v" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.154330 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb9nj\" (UniqueName: \"kubernetes.io/projected/a0f5d22c-34c3-40c2-889c-b7900120919c-kube-api-access-lb9nj\") pod \"designate-operator-controller-manager-78b4bc895b-lbp4z\" (UID: \"a0f5d22c-34c3-40c2-889c-b7900120919c\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.155950 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.157741 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjglm\" (UniqueName: \"kubernetes.io/projected/1b3586db-3f49-4ee4-aed0-5e4d469fad92-kube-api-access-kjglm\") pod \"horizon-operator-controller-manager-68c6d99b8f-gqwgf\" (UID: \"1b3586db-3f49-4ee4-aed0-5e4d469fad92\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.168774 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-psjhd"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.169952 4888 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.172277 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-nvs62" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.183209 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188379 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl7d8\" (UniqueName: \"kubernetes.io/projected/6cb92420-4e6c-4407-9a54-93f003d1c5e9-kube-api-access-hl7d8\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188471 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188529 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cwkz\" (UniqueName: \"kubernetes.io/projected/516e9598-68a9-431a-84af-725e3a053e66-kube-api-access-7cwkz\") pod \"manila-operator-controller-manager-6546668bfd-sszrn\" (UID: \"516e9598-68a9-431a-84af-725e3a053e66\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188555 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd57w\" (UniqueName: \"kubernetes.io/projected/da594cb2-bb6a-4028-a609-68385c474377-kube-api-access-hd57w\") pod \"ironic-operator-controller-manager-6c548fd776-5mfth\" (UID: \"da594cb2-bb6a-4028-a609-68385c474377\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188630 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zql6p\" (UniqueName: \"kubernetes.io/projected/968d110b-5720-400e-9094-8ec39acb4cf6-kube-api-access-zql6p\") pod \"mariadb-operator-controller-manager-56bbcc9d85-pxs95\" (UID: \"968d110b-5720-400e-9094-8ec39acb4cf6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188653 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbfnj\" (UniqueName: \"kubernetes.io/projected/8ad47b0a-b049-45fa-afea-44eb4d5be85f-kube-api-access-pbfnj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tcnh9\" (UID: \"8ad47b0a-b049-45fa-afea-44eb4d5be85f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.188700 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljs89\" (UniqueName: 
\"kubernetes.io/projected/b7f34996-fe75-4c30-9e22-022f644f7c89-kube-api-access-ljs89\") pod \"keystone-operator-controller-manager-546d4bdf48-446zc\" (UID: \"b7f34996-fe75-4c30-9e22-022f644f7c89\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.189450 4888 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.189519 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert podName:6cb92420-4e6c-4407-9a54-93f003d1c5e9 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:09.689496572 +0000 UTC m=+889.560526486 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert") pod "infra-operator-controller-manager-57548d458d-bjgvv" (UID: "6cb92420-4e6c-4407-9a54-93f003d1c5e9") : secret "infra-operator-webhook-server-cert" not found Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.215677 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.225000 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd57w\" (UniqueName: \"kubernetes.io/projected/da594cb2-bb6a-4028-a609-68385c474377-kube-api-access-hd57w\") pod \"ironic-operator-controller-manager-6c548fd776-5mfth\" (UID: \"da594cb2-bb6a-4028-a609-68385c474377\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.239459 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljs89\" (UniqueName: \"kubernetes.io/projected/b7f34996-fe75-4c30-9e22-022f644f7c89-kube-api-access-ljs89\") pod \"keystone-operator-controller-manager-546d4bdf48-446zc\" (UID: \"b7f34996-fe75-4c30-9e22-022f644f7c89\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.240061 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-psjhd"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.241128 4888 generic.go:334] "Generic (PLEG): container finished" podID="73e11a83-0e8b-45c0-b658-2e510650e935" containerID="eab9582f31ae30913c841b2bc2d531656d1d952df0f82f8eadbf54948e1660ce" exitCode=0 Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.244931 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerDied","Data":"eab9582f31ae30913c841b2bc2d531656d1d952df0f82f8eadbf54948e1660ce"} Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.275348 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl7d8\" (UniqueName: \"kubernetes.io/projected/6cb92420-4e6c-4407-9a54-93f003d1c5e9-kube-api-access-hl7d8\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 
19:48:09.278708 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.318006 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nv86\" (UniqueName: \"kubernetes.io/projected/4651cf7c-a7f9-4137-9d3b-6a656746f373-kube-api-access-6nv86\") pod \"nova-operator-controller-manager-697bc559fc-z2xkh\" (UID: \"4651cf7c-a7f9-4137-9d3b-6a656746f373\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.318279 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zql6p\" (UniqueName: \"kubernetes.io/projected/968d110b-5720-400e-9094-8ec39acb4cf6-kube-api-access-zql6p\") pod \"mariadb-operator-controller-manager-56bbcc9d85-pxs95\" (UID: \"968d110b-5720-400e-9094-8ec39acb4cf6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.327635 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.328466 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.333504 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbfnj\" (UniqueName: \"kubernetes.io/projected/8ad47b0a-b049-45fa-afea-44eb4d5be85f-kube-api-access-pbfnj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tcnh9\" (UID: \"8ad47b0a-b049-45fa-afea-44eb4d5be85f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.333763 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cwkz\" (UniqueName: \"kubernetes.io/projected/516e9598-68a9-431a-84af-725e3a053e66-kube-api-access-7cwkz\") pod \"manila-operator-controller-manager-6546668bfd-sszrn\" (UID: \"516e9598-68a9-431a-84af-725e3a053e66\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.333821 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckxbl\" (UniqueName: \"kubernetes.io/projected/922916d6-2e57-4087-b5ae-24c6318f180a-kube-api-access-ckxbl\") pod \"octavia-operator-controller-manager-998648c74-psjhd\" (UID: \"922916d6-2e57-4087-b5ae-24c6318f180a\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.345238 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.354550 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.357408 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-5v8kh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.358111 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.365890 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbfnj\" (UniqueName: \"kubernetes.io/projected/8ad47b0a-b049-45fa-afea-44eb4d5be85f-kube-api-access-pbfnj\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-tcnh9\" (UID: \"8ad47b0a-b049-45fa-afea-44eb4d5be85f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.368677 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zql6p\" (UniqueName: \"kubernetes.io/projected/968d110b-5720-400e-9094-8ec39acb4cf6-kube-api-access-zql6p\") pod \"mariadb-operator-controller-manager-56bbcc9d85-pxs95\" (UID: \"968d110b-5720-400e-9094-8ec39acb4cf6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.374285 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cwkz\" (UniqueName: \"kubernetes.io/projected/516e9598-68a9-431a-84af-725e3a053e66-kube-api-access-7cwkz\") pod \"manila-operator-controller-manager-6546668bfd-sszrn\" (UID: \"516e9598-68a9-431a-84af-725e3a053e66\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.374411 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.375904 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.380786 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-v8kvw" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.382414 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.402810 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.403306 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.406450 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-mwpnk" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.409365 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.410581 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.412020 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-2tg8c" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.419392 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.427392 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.434903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckxbl\" (UniqueName: \"kubernetes.io/projected/922916d6-2e57-4087-b5ae-24c6318f180a-kube-api-access-ckxbl\") pod \"octavia-operator-controller-manager-998648c74-psjhd\" (UID: \"922916d6-2e57-4087-b5ae-24c6318f180a\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.434959 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nv86\" (UniqueName: \"kubernetes.io/projected/4651cf7c-a7f9-4137-9d3b-6a656746f373-kube-api-access-6nv86\") pod \"nova-operator-controller-manager-697bc559fc-z2xkh\" (UID: \"4651cf7c-a7f9-4137-9d3b-6a656746f373\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.443353 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.458605 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.467560 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.471125 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.472435 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckxbl\" (UniqueName: \"kubernetes.io/projected/922916d6-2e57-4087-b5ae-24c6318f180a-kube-api-access-ckxbl\") pod \"octavia-operator-controller-manager-998648c74-psjhd\" (UID: \"922916d6-2e57-4087-b5ae-24c6318f180a\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.477594 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.487358 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.491729 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nv86\" (UniqueName: \"kubernetes.io/projected/4651cf7c-a7f9-4137-9d3b-6a656746f373-kube-api-access-6nv86\") pod \"nova-operator-controller-manager-697bc559fc-z2xkh\" (UID: \"4651cf7c-a7f9-4137-9d3b-6a656746f373\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.492435 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.501698 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-z7rrw" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.510636 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.519587 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.530222 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.531645 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.537169 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.539217 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.540401 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5t8z\" (UniqueName: \"kubernetes.io/projected/06a8d696-66ab-49ef-b858-2245cc6e0023-kube-api-access-s5t8z\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.540426 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.540469 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgmrx\" (UniqueName: \"kubernetes.io/projected/ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02-kube-api-access-lgmrx\") pod \"ovn-operator-controller-manager-b6456fdb6-v2mfg\" (UID: \"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.540732 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9spw2\" (UniqueName: \"kubernetes.io/projected/1f11c3a5-7276-48d5-9dc1-389ab98ffc11-kube-api-access-9spw2\") pod \"swift-operator-controller-manager-5f8c65bbfc-f6vhb\" (UID: \"1f11c3a5-7276-48d5-9dc1-389ab98ffc11\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.540806 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phcgp\" (UniqueName: \"kubernetes.io/projected/7914a20a-7747-446f-a496-deecd734fb83-kube-api-access-phcgp\") pod \"placement-operator-controller-manager-78f8948974-d4tzz\" (UID: \"7914a20a-7747-446f-a496-deecd734fb83\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.542853 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-5fd65" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.545801 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.552878 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.566255 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vkmbj" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.566520 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.585719 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.606809 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"] Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.607711 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.613169 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.613532 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bqksd" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.616326 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.641881 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgmrx\" (UniqueName: \"kubernetes.io/projected/ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02-kube-api-access-lgmrx\") pod \"ovn-operator-controller-manager-b6456fdb6-v2mfg\" (UID: \"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642403 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtkvp\" (UniqueName: \"kubernetes.io/projected/e75f9e52-ceaa-463a-ba65-ed651715c4f4-kube-api-access-mtkvp\") pod \"telemetry-operator-controller-manager-76cc84c6bb-g79qf\" (UID: \"e75f9e52-ceaa-463a-ba65-ed651715c4f4\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642484 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9spw2\" (UniqueName: \"kubernetes.io/projected/1f11c3a5-7276-48d5-9dc1-389ab98ffc11-kube-api-access-9spw2\") pod \"swift-operator-controller-manager-5f8c65bbfc-f6vhb\" (UID: \"1f11c3a5-7276-48d5-9dc1-389ab98ffc11\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642520 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phcgp\" (UniqueName: \"kubernetes.io/projected/7914a20a-7747-446f-a496-deecd734fb83-kube-api-access-phcgp\") pod \"placement-operator-controller-manager-78f8948974-d4tzz\" (UID: \"7914a20a-7747-446f-a496-deecd734fb83\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" 
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642576 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5t8z\" (UniqueName: \"kubernetes.io/projected/06a8d696-66ab-49ef-b858-2245cc6e0023-kube-api-access-s5t8z\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642605 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.642635 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp8t9\" (UniqueName: \"kubernetes.io/projected/b50af81b-6773-46f1-916e-0346848ba65e-kube-api-access-qp8t9\") pod \"test-operator-controller-manager-5854674fcc-cht8z\" (UID: \"b50af81b-6773-46f1-916e-0346848ba65e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.646026 4888 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.646107 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert podName:06a8d696-66ab-49ef-b858-2245cc6e0023 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:10.146064371 +0000 UTC m=+890.017094285 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" (UID: "06a8d696-66ab-49ef-b858-2245cc6e0023") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.670252 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.681953 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phcgp\" (UniqueName: \"kubernetes.io/projected/7914a20a-7747-446f-a496-deecd734fb83-kube-api-access-phcgp\") pod \"placement-operator-controller-manager-78f8948974-d4tzz\" (UID: \"7914a20a-7747-446f-a496-deecd734fb83\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.702560 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5t8z\" (UniqueName: \"kubernetes.io/projected/06a8d696-66ab-49ef-b858-2245cc6e0023-kube-api-access-s5t8z\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.703135 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgmrx\" (UniqueName: \"kubernetes.io/projected/ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02-kube-api-access-lgmrx\") pod \"ovn-operator-controller-manager-b6456fdb6-v2mfg\" (UID: \"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.707420 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9spw2\" (UniqueName: \"kubernetes.io/projected/1f11c3a5-7276-48d5-9dc1-389ab98ffc11-kube-api-access-9spw2\") pod \"swift-operator-controller-manager-5f8c65bbfc-f6vhb\" (UID: \"1f11c3a5-7276-48d5-9dc1-389ab98ffc11\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.711214 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.712108 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.715980 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-52sgj"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.733171 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.736350 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756369 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m8cl\" (UniqueName: \"kubernetes.io/projected/f94185be-1233-4c97-add2-b6e2fcd22827-kube-api-access-8m8cl\") pod \"watcher-operator-controller-manager-769dc69bc-ntzlv\" (UID: \"f94185be-1233-4c97-add2-b6e2fcd22827\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756450 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756548 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp8t9\" (UniqueName: \"kubernetes.io/projected/b50af81b-6773-46f1-916e-0346848ba65e-kube-api-access-qp8t9\") pod \"test-operator-controller-manager-5854674fcc-cht8z\" (UID: \"b50af81b-6773-46f1-916e-0346848ba65e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756596 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756684 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj97w\" (UniqueName: \"kubernetes.io/projected/b01f5340-ffdb-4963-9e49-47dad6f75642-kube-api-access-rj97w\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756737 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtkvp\" (UniqueName: \"kubernetes.io/projected/e75f9e52-ceaa-463a-ba65-ed651715c4f4-kube-api-access-mtkvp\") pod \"telemetry-operator-controller-manager-76cc84c6bb-g79qf\" (UID: \"e75f9e52-ceaa-463a-ba65-ed651715c4f4\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.756846 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.787542 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz"
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.808784 4888 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.808873 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert podName:6cb92420-4e6c-4407-9a54-93f003d1c5e9 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:10.808851119 +0000 UTC m=+890.679881033 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert") pod "infra-operator-controller-manager-57548d458d-bjgvv" (UID: "6cb92420-4e6c-4407-9a54-93f003d1c5e9") : secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.826324 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.842892 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp8t9\" (UniqueName: \"kubernetes.io/projected/b50af81b-6773-46f1-916e-0346848ba65e-kube-api-access-qp8t9\") pod \"test-operator-controller-manager-5854674fcc-cht8z\" (UID: \"b50af81b-6773-46f1-916e-0346848ba65e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.854986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtkvp\" (UniqueName: \"kubernetes.io/projected/e75f9e52-ceaa-463a-ba65-ed651715c4f4-kube-api-access-mtkvp\") pod \"telemetry-operator-controller-manager-76cc84c6bb-g79qf\" (UID: \"e75f9e52-ceaa-463a-ba65-ed651715c4f4\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.862513 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.885060 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcnk7\" (UniqueName: \"kubernetes.io/projected/e11be1d4-dbcb-4e6b-a97a-918425cb85ce-kube-api-access-rcnk7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ql8v9\" (UID: \"e11be1d4-dbcb-4e6b-a97a-918425cb85ce\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.885596 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m8cl\" (UniqueName: \"kubernetes.io/projected/f94185be-1233-4c97-add2-b6e2fcd22827-kube-api-access-8m8cl\") pod \"watcher-operator-controller-manager-769dc69bc-ntzlv\" (UID: \"f94185be-1233-4c97-add2-b6e2fcd22827\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.885646 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.885681 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.885730 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj97w\" (UniqueName: \"kubernetes.io/projected/b01f5340-ffdb-4963-9e49-47dad6f75642-kube-api-access-rj97w\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.886416 4888 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.886479 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:10.386455884 +0000 UTC m=+890.257485798 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "metrics-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.886661 4888 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: E1201 19:48:09.886700 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:10.38669289 +0000 UTC m=+890.257722804 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "webhook-server-cert" not found
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.912843 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m8cl\" (UniqueName: \"kubernetes.io/projected/f94185be-1233-4c97-add2-b6e2fcd22827-kube-api-access-8m8cl\") pod \"watcher-operator-controller-manager-769dc69bc-ntzlv\" (UID: \"f94185be-1233-4c97-add2-b6e2fcd22827\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.930968 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj97w\" (UniqueName: \"kubernetes.io/projected/b01f5340-ffdb-4963-9e49-47dad6f75642-kube-api-access-rj97w\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.931794 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj"]
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.988081 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcnk7\" (UniqueName: \"kubernetes.io/projected/e11be1d4-dbcb-4e6b-a97a-918425cb85ce-kube-api-access-rcnk7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ql8v9\" (UID: \"e11be1d4-dbcb-4e6b-a97a-918425cb85ce\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"
Dec 01 19:48:09 crc kubenswrapper[4888]: I1201 19:48:09.993846 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.012795 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.029282 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcnk7\" (UniqueName: \"kubernetes.io/projected/e11be1d4-dbcb-4e6b-a97a-918425cb85ce-kube-api-access-rcnk7\") pod \"rabbitmq-cluster-operator-manager-668c99d594-ql8v9\" (UID: \"e11be1d4-dbcb-4e6b-a97a-918425cb85ce\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.064563 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft"]
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.081676 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d94aa0c_c3c1_4354_8a6e_f6b20a56cdf2.slice/crio-fd54ab8544d54b3cab035a9a1795eb90bad71c9e17f7333318d45f475e2a1831 WatchSource:0}: Error finding container fd54ab8544d54b3cab035a9a1795eb90bad71c9e17f7333318d45f475e2a1831: Status 404 returned error can't find the container with id fd54ab8544d54b3cab035a9a1795eb90bad71c9e17f7333318d45f475e2a1831
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.083111 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b3586db_3f49_4ee4_aed0_5e4d469fad92.slice/crio-b82ca1f432f2c1bcc7538309bef18feb87609042aaefe03eb38f404e9df9d5a8 WatchSource:0}: Error finding container b82ca1f432f2c1bcc7538309bef18feb87609042aaefe03eb38f404e9df9d5a8: Status 404 returned error can't find the container with id b82ca1f432f2c1bcc7538309bef18feb87609042aaefe03eb38f404e9df9d5a8
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.117339 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.122052 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.151389 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1529e922_fd00_4f32_878a_d8a322a7b6b7.slice/crio-25c494dea4a2dc091b8c64275deb9329de43df291d017c1b92eb96391b095148 WatchSource:0}: Error finding container 25c494dea4a2dc091b8c64275deb9329de43df291d017c1b92eb96391b095148: Status 404 returned error can't find the container with id 25c494dea4a2dc091b8c64275deb9329de43df291d017c1b92eb96391b095148
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.190013 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.190723 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.191228 4888 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.191289 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert podName:06a8d696-66ab-49ef-b858-2245cc6e0023 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:11.191265163 +0000 UTC m=+891.062295077 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" (UID: "06a8d696-66ab-49ef-b858-2245cc6e0023") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.272381 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" event={"ID":"1529e922-fd00-4f32-878a-d8a322a7b6b7","Type":"ContainerStarted","Data":"25c494dea4a2dc091b8c64275deb9329de43df291d017c1b92eb96391b095148"}
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.281035 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" event={"ID":"a363b1d3-f519-41df-bdf8-e80b83edab4d","Type":"ContainerStarted","Data":"bdfac354b637fb108d3457175a557fbc94d2b78d67aabdbec7dd2c3fae2f0539"}
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.288390 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" event={"ID":"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2","Type":"ContainerStarted","Data":"fd54ab8544d54b3cab035a9a1795eb90bad71c9e17f7333318d45f475e2a1831"}
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.297124 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" event={"ID":"1b3586db-3f49-4ee4-aed0-5e4d469fad92","Type":"ContainerStarted","Data":"b82ca1f432f2c1bcc7538309bef18feb87609042aaefe03eb38f404e9df9d5a8"}
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.310228 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc"]
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.339884 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7f34996_fe75_4c30_9e22_022f644f7c89.slice/crio-10c0a367bffa0f092449423b8b7febdc06cea8c7bb24ef7053a8a59d80091c5a WatchSource:0}: Error finding container 10c0a367bffa0f092449423b8b7febdc06cea8c7bb24ef7053a8a59d80091c5a: Status 404 returned error can't find the container with id 10c0a367bffa0f092449423b8b7febdc06cea8c7bb24ef7053a8a59d80091c5a
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.393371 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.393502 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.393510 4888 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.393574 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:11.393559343 +0000 UTC m=+891.264589257 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.393584 4888 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.393612 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:11.393604134 +0000 UTC m=+891.264634048 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "metrics-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.777874 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.806914 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.850402 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.888918 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.901994 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.902161 4888 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: E1201 19:48:10.902227 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert podName:6cb92420-4e6c-4407-9a54-93f003d1c5e9 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:12.902212217 +0000 UTC m=+892.773242131 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert") pod "infra-operator-controller-manager-57548d458d-bjgvv" (UID: "6cb92420-4e6c-4407-9a54-93f003d1c5e9") : secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.905724 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-psjhd"]
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.914487 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod922916d6_2e57_4087_b5ae_24c6318f180a.slice/crio-2f1fdd638f7b029f6af54e126a52f835f687af7a784e4d22bf499d830419e6ac WatchSource:0}: Error finding container 2f1fdd638f7b029f6af54e126a52f835f687af7a784e4d22bf499d830419e6ac: Status 404 returned error can't find the container with id 2f1fdd638f7b029f6af54e126a52f835f687af7a784e4d22bf499d830419e6ac
Dec 01 19:48:10 crc kubenswrapper[4888]: W1201 19:48:10.914941 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f5355dc_205f_4dca_91cf_39209ca1a7b3.slice/crio-a47a34d5f78d830ec3c346a2fd4760fce042ddf81e7292213e2a3a9687743d4f WatchSource:0}: Error finding container a47a34d5f78d830ec3c346a2fd4760fce042ddf81e7292213e2a3a9687743d4f: Status 404 returned error can't find the container with id a47a34d5f78d830ec3c346a2fd4760fce042ddf81e7292213e2a3a9687743d4f
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.925455 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg"]
Dec 01 19:48:10 crc kubenswrapper[4888]: I1201 19:48:10.948689 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.092638 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.198712 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.210150 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.210577 4888 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.210662 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert podName:06a8d696-66ab-49ef-b858-2245cc6e0023 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:13.21063857 +0000 UTC m=+893.081668484 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" (UID: "06a8d696-66ab-49ef-b858-2245cc6e0023") : secret "openstack-baremetal-operator-webhook-server-cert" not found
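NOTE: the durationBeforeRetry values in the nestedpendingoperations entries climb 500ms -> 1s -> 2s here, and 4s/8s further down: kubelet's volume manager doubles the wait after each failed volume operation for the same volume. A minimal sketch of that doubling schedule; the initial delay matches the log, while the 2m2s cap is an assumption based on the values commonly seen in kubelet logs, not something this section shows.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Doubling backoff as observed in the durationBeforeRetry fields above.
	d := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second // assumed cap, not shown in this log
	for i := 0; i < 10; i++ {
		fmt.Println(d) // 500ms, 1s, 2s, 4s, 8s, ...
		d *= 2
		if d > maxDelay {
			d = maxDelay
		}
	}
}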
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.244157 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.258778 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.276712 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.287968 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf"]
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.289403 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb"]
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.290540 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9spw2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-f6vhb_openstack-operators(1f11c3a5-7276-48d5-9dc1-389ab98ffc11): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.296923 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9spw2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-f6vhb_openstack-operators(1f11c3a5-7276-48d5-9dc1-389ab98ffc11): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.298485 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" podUID="1f11c3a5-7276-48d5-9dc1-389ab98ffc11"
Dec 01 19:48:11 crc kubenswrapper[4888]: W1201 19:48:11.299296 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod968d110b_5720_400e_9094_8ec39acb4cf6.slice/crio-401d56b2a74ecf7a2de500fffd30a5c0a6cf54f0c8753fe41d5816298a8cfe49 WatchSource:0}: Error finding container 401d56b2a74ecf7a2de500fffd30a5c0a6cf54f0c8753fe41d5816298a8cfe49: Status 404 returned error can't find the container with id 401d56b2a74ecf7a2de500fffd30a5c0a6cf54f0c8753fe41d5816298a8cfe49
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.300733 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-cht8z"]
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.314766 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zql6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-pxs95_openstack-operators(968d110b-5720-400e-9094-8ec39acb4cf6): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.315773 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" event={"ID":"4651cf7c-a7f9-4137-9d3b-6a656746f373","Type":"ContainerStarted","Data":"0a34a4d2e2c9865625f6d04e7a7a885e118ddf82850a4e40cf2c23cd9f96f9e6"}
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.316885 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zql6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-pxs95_openstack-operators(968d110b-5720-400e-9094-8ec39acb4cf6): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.318893 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" podUID="968d110b-5720-400e-9094-8ec39acb4cf6"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.319793 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" event={"ID":"f94185be-1233-4c97-add2-b6e2fcd22827","Type":"ContainerStarted","Data":"472d7e04f05cc8bab789d7c8f2e72044e3e841790f769e241c4791b108508585"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.324492 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" event={"ID":"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02","Type":"ContainerStarted","Data":"82d670602dd633df3d9ea86329561eee3cc068d0235c3970cec3d83cd78ee95a"}
Dec 01 19:48:11 crc kubenswrapper[4888]: W1201 19:48:11.326558 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode75f9e52_ceaa_463a_ba65_ed651715c4f4.slice/crio-5d49f882b38b3dee5e3d01acbc1c4d655ca29e3a8fdd63535b1dc5856406b6e4 WatchSource:0}: Error finding container 5d49f882b38b3dee5e3d01acbc1c4d655ca29e3a8fdd63535b1dc5856406b6e4: Status 404 returned error can't find the container with id 5d49f882b38b3dee5e3d01acbc1c4d655ca29e3a8fdd63535b1dc5856406b6e4
Dec 01 19:48:11 crc kubenswrapper[4888]: W1201 19:48:11.327022 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb50af81b_6773_46f1_916e_0346848ba65e.slice/crio-369e8772f6d499e0c88dc51b1fa6dd8ed2e77f4683bed316eaf7c3bdacc0399b WatchSource:0}: Error finding container 369e8772f6d499e0c88dc51b1fa6dd8ed2e77f4683bed316eaf7c3bdacc0399b: Status 404 returned error can't find the container with id 369e8772f6d499e0c88dc51b1fa6dd8ed2e77f4683bed316eaf7c3bdacc0399b
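NOTE: "pull QPS exceeded" in the entries above is kubelet's own image-pull rate limiter rejecting the pull, not a registry-side error: with a dozen operator pods landing at once, the burst of pulls exhausts the kubelet's token bucket (the KubeletConfiguration fields registryPullQPS and registryBurst; 5 and 10 are the commonly cited defaults, assumed here). The affected containers fall through to ImagePullBackOff and are retried. A sketch of the same token-bucket behavior using client-go's flowcontrol package; the attempt count is arbitrary.

package main

import (
	"fmt"

	"k8s.io/client-go/util/flowcontrol"
)

func main() {
	// 5 QPS with a burst of 10, mirroring the assumed kubelet defaults above.
	limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)
	for i := 1; i <= 15; i++ {
		if limiter.TryAccept() {
			fmt.Printf("pull %d: allowed\n", i)
		} else {
			// Same condition the log reports as "pull QPS exceeded".
			fmt.Printf("pull %d: pull QPS exceeded\n", i)
		}
	}
}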
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.327549 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" event={"ID":"da594cb2-bb6a-4028-a609-68385c474377","Type":"ContainerStarted","Data":"5e3eba94bdd30159eac80716917b45cac5537d3fad0ff6c32c82034340914d53"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.329388 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" event={"ID":"922916d6-2e57-4087-b5ae-24c6318f180a","Type":"ContainerStarted","Data":"2f1fdd638f7b029f6af54e126a52f835f687af7a784e4d22bf499d830419e6ac"}
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.335659 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qp8t9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-cht8z_openstack-operators(b50af81b-6773-46f1-916e-0346848ba65e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.335832 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtkvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-g79qf_openstack-operators(e75f9e52-ceaa-463a-ba65-ed651715c4f4): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.335858 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" event={"ID":"1f11c3a5-7276-48d5-9dc1-389ab98ffc11","Type":"ContainerStarted","Data":"8165b58731394a175a9d7c7187e5bec5deb6f07b82cec37fc93dacef3fd7e747"}
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.337817 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qp8t9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-cht8z_openstack-operators(b50af81b-6773-46f1-916e-0346848ba65e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.338155 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtkvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-g79qf_openstack-operators(e75f9e52-ceaa-463a-ba65-ed651715c4f4): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.339151 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" podUID="b50af81b-6773-46f1-916e-0346848ba65e"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.339232 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" podUID="e75f9e52-ceaa-463a-ba65-ed651715c4f4"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.340968 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" event={"ID":"7914a20a-7747-446f-a496-deecd734fb83","Type":"ContainerStarted","Data":"3c59117cc94b21f4d361dfcb5734a2682d6b2ad2b82a6a503d8ebaecd426d6ff"}
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.342421 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" podUID="1f11c3a5-7276-48d5-9dc1-389ab98ffc11"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.342635 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" event={"ID":"516e9598-68a9-431a-84af-725e3a053e66","Type":"ContainerStarted","Data":"ba0793cac4c0f72105133d30b3aec811dbcc7be9ffe914cd06df3a7471047192"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.344129 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" event={"ID":"b7f34996-fe75-4c30-9e22-022f644f7c89","Type":"ContainerStarted","Data":"10c0a367bffa0f092449423b8b7febdc06cea8c7bb24ef7053a8a59d80091c5a"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.346854 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9" event={"ID":"e11be1d4-dbcb-4e6b-a97a-918425cb85ce","Type":"ContainerStarted","Data":"1edb22df016f312b9e43b9453cc7c122df28284a9664c0cfa2f1c86f05fbbc45"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.349549 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" event={"ID":"8ad47b0a-b049-45fa-afea-44eb4d5be85f","Type":"ContainerStarted","Data":"e00721634b67d30f62d8114850abae0f01c0e4e6714323bb1db8eb3a529e8a59"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.355847 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" event={"ID":"9f5355dc-205f-4dca-91cf-39209ca1a7b3","Type":"ContainerStarted","Data":"a47a34d5f78d830ec3c346a2fd4760fce042ddf81e7292213e2a3a9687743d4f"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.384896 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerStarted","Data":"651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.388847 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" event={"ID":"a0f5d22c-34c3-40c2-889c-b7900120919c","Type":"ContainerStarted","Data":"f52e3724a5b461d75a67c17c77d577bb6ce8c9d56452a724629a12102cc43dd8"}
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.412043 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kg5r9" podStartSLOduration=3.367359461 podStartE2EDuration="6.412012893s" podCreationTimestamp="2025-12-01 19:48:05 +0000 UTC" firstStartedPulling="2025-12-01 19:48:07.20204693 +0000 UTC m=+887.073076844" lastFinishedPulling="2025-12-01 19:48:10.246700362 +0000 UTC m=+890.117730276" observedRunningTime="2025-12-01 19:48:11.409713857 +0000 UTC m=+891.280743771" watchObservedRunningTime="2025-12-01 19:48:11.412012893 +0000 UTC m=+891.283042807"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.413341 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:11 crc kubenswrapper[4888]: I1201 19:48:11.413401 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.413556 4888 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.413621 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:13.413603899 +0000 UTC m=+893.284633813 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "webhook-server-cert" not found
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.413625 4888 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 19:48:11 crc kubenswrapper[4888]: E1201 19:48:11.413792 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:13.413733473 +0000 UTC m=+893.284763377 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "metrics-server-cert" not found
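NOTE: the pod_startup_latency_tracker entry above carries enough data to check its own arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A short sketch reproducing both numbers from the timestamps in the entry:

package main

import (
	"fmt"
	"time"
)

// mustParse reads timestamps in the format the kubelet log prints.
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-01 19:48:05 +0000 UTC")
	firstPull := mustParse("2025-12-01 19:48:07.20204693 +0000 UTC")
	lastPull := mustParse("2025-12-01 19:48:10.246700362 +0000 UTC")
	running := mustParse("2025-12-01 19:48:11.412012893 +0000 UTC")

	e2e := running.Sub(created)          // 6.412012893s, matches podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 3.367359461s, matches podStartSLOduration
	fmt.Println(e2e, slo)
}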
Dec 01 19:48:12 crc kubenswrapper[4888]: I1201 19:48:12.409172 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" event={"ID":"b50af81b-6773-46f1-916e-0346848ba65e","Type":"ContainerStarted","Data":"369e8772f6d499e0c88dc51b1fa6dd8ed2e77f4683bed316eaf7c3bdacc0399b"}
Dec 01 19:48:12 crc kubenswrapper[4888]: I1201 19:48:12.412346 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" event={"ID":"e75f9e52-ceaa-463a-ba65-ed651715c4f4","Type":"ContainerStarted","Data":"5d49f882b38b3dee5e3d01acbc1c4d655ca29e3a8fdd63535b1dc5856406b6e4"}
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.419492 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" podUID="e75f9e52-ceaa-463a-ba65-ed651715c4f4"
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.419675 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" podUID="b50af81b-6773-46f1-916e-0346848ba65e"
Dec 01 19:48:12 crc kubenswrapper[4888]: I1201 19:48:12.421559 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" event={"ID":"968d110b-5720-400e-9094-8ec39acb4cf6","Type":"ContainerStarted","Data":"401d56b2a74ecf7a2de500fffd30a5c0a6cf54f0c8753fe41d5816298a8cfe49"}
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.427775 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" podUID="1f11c3a5-7276-48d5-9dc1-389ab98ffc11"
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.438791 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" podUID="968d110b-5720-400e-9094-8ec39acb4cf6"
Dec 01 19:48:12 crc kubenswrapper[4888]: I1201 19:48:12.942719 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.943272 4888 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:12 crc kubenswrapper[4888]: E1201 19:48:12.943347 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert podName:6cb92420-4e6c-4407-9a54-93f003d1c5e9 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:16.943327448 +0000 UTC m=+896.814357372 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert") pod "infra-operator-controller-manager-57548d458d-bjgvv" (UID: "6cb92420-4e6c-4407-9a54-93f003d1c5e9") : secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: I1201 19:48:13.247686 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.247918 4888 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.248009 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert podName:06a8d696-66ab-49ef-b858-2245cc6e0023 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:17.247988201 +0000 UTC m=+897.119018115 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" (UID: "06a8d696-66ab-49ef-b858-2245cc6e0023") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.440547 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" podUID="e75f9e52-ceaa-463a-ba65-ed651715c4f4"
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.440576 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" podUID="b50af81b-6773-46f1-916e-0346848ba65e"
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.441332 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" podUID="968d110b-5720-400e-9094-8ec39acb4cf6"
Dec 01 19:48:13 crc kubenswrapper[4888]: I1201 19:48:13.452463 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:13 crc kubenswrapper[4888]: I1201 19:48:13.452557 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.452637 4888 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.452680 4888 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.452702 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:17.45268417 +0000 UTC m=+897.323714084 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "metrics-server-cert" not found
Dec 01 19:48:13 crc kubenswrapper[4888]: E1201 19:48:13.452722 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:17.452713731 +0000 UTC m=+897.323743725 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "webhook-server-cert" not found
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.132401 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.133787 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.261466 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.507677 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.546151 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:16 crc kubenswrapper[4888]: I1201 19:48:16.957255 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:16 crc kubenswrapper[4888]: E1201 19:48:16.957495 4888 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 19:48:16 crc kubenswrapper[4888]: E1201 19:48:16.957552 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert podName:6cb92420-4e6c-4407-9a54-93f003d1c5e9 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:24.957533737 +0000 UTC m=+904.828563651 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert") pod "infra-operator-controller-manager-57548d458d-bjgvv" (UID: "6cb92420-4e6c-4407-9a54-93f003d1c5e9") : secret "infra-operator-webhook-server-cert" not found
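NOTE: the probe transitions and "SyncLoop DELETE" above are the catalog pod openshift-marketplace/certified-operators-kg5r9 being removed through the API; the entries that follow show kubelet honoring the delete by killing registry-server with a short grace period before the container exits 0. For reference, a minimal client-go sketch of the same kind of graceful delete; the pod name and namespace are reused from the log, the kubeconfig path is an assumption.

package main

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumed path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	grace := int64(2) // mirrors gracePeriod=2 in the kill entry below
	err = cs.CoreV1().Pods("openshift-marketplace").Delete(
		context.TODO(), "certified-operators-kg5r9",
		metav1.DeleteOptions{GracePeriodSeconds: &grace},
	)
	if err != nil {
		panic(err)
	}
}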
Dec 01 19:48:17 crc kubenswrapper[4888]: I1201 19:48:17.261746 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.261988 4888 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.262059 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert podName:06a8d696-66ab-49ef-b858-2245cc6e0023 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:25.262044308 +0000 UTC m=+905.133074222 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" (UID: "06a8d696-66ab-49ef-b858-2245cc6e0023") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 19:48:17 crc kubenswrapper[4888]: I1201 19:48:17.465782 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:17 crc kubenswrapper[4888]: I1201 19:48:17.466001 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.466014 4888 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.466076 4888 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.466097 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:25.466077668 +0000 UTC m=+905.337107692 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "webhook-server-cert" not found
Dec 01 19:48:17 crc kubenswrapper[4888]: E1201 19:48:17.466115 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs podName:b01f5340-ffdb-4963-9e49-47dad6f75642 nodeName:}" failed. No retries permitted until 2025-12-01 19:48:25.466105289 +0000 UTC m=+905.337135203 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs") pod "openstack-operator-controller-manager-6c58f9c549-nbj7h" (UID: "b01f5340-ffdb-4963-9e49-47dad6f75642") : secret "metrics-server-cert" not found
Dec 01 19:48:18 crc kubenswrapper[4888]: I1201 19:48:18.496792 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kg5r9" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" containerID="cri-o://651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" gracePeriod=2
Dec 01 19:48:19 crc kubenswrapper[4888]: I1201 19:48:19.504897 4888 generic.go:334] "Generic (PLEG): container finished" podID="73e11a83-0e8b-45c0-b658-2e510650e935" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" exitCode=0
Dec 01 19:48:19 crc kubenswrapper[4888]: I1201 19:48:19.504937 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerDied","Data":"651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e"}
Dec 01 19:48:20 crc kubenswrapper[4888]: I1201 19:48:20.038174 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:48:20 crc kubenswrapper[4888]: I1201 19:48:20.038631 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:48:24 crc kubenswrapper[4888]: E1201 19:48:24.544358 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f"
Dec 01 19:48:24 crc kubenswrapper[4888]: E1201 19:48:24.545023 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-phcgp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-d4tzz_openstack-operators(7914a20a-7747-446f-a496-deecd734fb83): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 01 19:48:24 crc kubenswrapper[4888]: I1201 19:48:24.986827 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:24 crc kubenswrapper[4888]: I1201 19:48:24.993522 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6cb92420-4e6c-4407-9a54-93f003d1c5e9-cert\") pod \"infra-operator-controller-manager-57548d458d-bjgvv\" (UID: \"6cb92420-4e6c-4407-9a54-93f003d1c5e9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.148633 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-9qggh"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.157504 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.289690 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.315634 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/06a8d696-66ab-49ef-b858-2245cc6e0023-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp\" (UID: \"06a8d696-66ab-49ef-b858-2245cc6e0023\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.492527 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.492586 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.506453 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-webhook-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.508389 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b01f5340-ffdb-4963-9e49-47dad6f75642-metrics-certs\") pod \"openstack-operator-controller-manager-6c58f9c549-nbj7h\" (UID: \"b01f5340-ffdb-4963-9e49-47dad6f75642\") " pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.547652 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-bqksd"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.556713 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.609119 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-5v8kh"
Dec 01 19:48:25 crc kubenswrapper[4888]: I1201 19:48:25.616916 4888 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.627566 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:440cde33d3a2a0c545cd1c110a3634eb85544370f448865b97a13c38034b0172" Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.627756 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:440cde33d3a2a0c545cd1c110a3634eb85544370f448865b97a13c38034b0172,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jpc5b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-668d9c48b9-jszb6_openstack-operators(9f5355dc-205f-4dca-91cf-39209ca1a7b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.958500 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.959283 4888 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.959715 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:25 crc kubenswrapper[4888]: E1201 19:48:25.959743 4888 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-kg5r9" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" Dec 01 19:48:26 crc kubenswrapper[4888]: E1201 19:48:26.565693 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 01 19:48:26 crc kubenswrapper[4888]: E1201 19:48:26.565882 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-95qv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-kjwft_openstack-operators(1529e922-fd00-4f32-878a-d8a322a7b6b7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:27 crc kubenswrapper[4888]: E1201 19:48:27.162106 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 01 19:48:27 crc kubenswrapper[4888]: E1201 19:48:27.162339 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8m8cl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-ntzlv_openstack-operators(f94185be-1233-4c97-add2-b6e2fcd22827): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:27 crc kubenswrapper[4888]: E1201 19:48:27.885171 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 01 19:48:27 crc kubenswrapper[4888]: E1201 19:48:27.885423 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hd57w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-5mfth_openstack-operators(da594cb2-bb6a-4028-a609-68385c474377): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:28 crc kubenswrapper[4888]: E1201 19:48:28.700081 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 01 19:48:28 crc kubenswrapper[4888]: E1201 19:48:28.700601 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pbfnj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-tcnh9_openstack-operators(8ad47b0a-b049-45fa-afea-44eb4d5be85f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:32 crc kubenswrapper[4888]: E1201 19:48:32.153615 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 01 19:48:32 crc kubenswrapper[4888]: E1201 19:48:32.153818 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ckxbl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-psjhd_openstack-operators(922916d6-2e57-4087-b5ae-24c6318f180a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:32 crc kubenswrapper[4888]: E1201 19:48:32.782550 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 01 19:48:32 crc kubenswrapper[4888]: E1201 19:48:32.782753 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lgmrx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-v2mfg_openstack-operators(ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:33 crc kubenswrapper[4888]: E1201 19:48:33.447844 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3" Dec 01 19:48:33 crc kubenswrapper[4888]: E1201 19:48:33.448363 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:986861e5a0a9954f63581d9d55a30f8057883cefea489415d76257774526eea3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ljs89,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-546d4bdf48-446zc_openstack-operators(b7f34996-fe75-4c30-9e22-022f644f7c89): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:34 crc kubenswrapper[4888]: E1201 19:48:34.440491 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 01 19:48:34 crc kubenswrapper[4888]: E1201 19:48:34.440700 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rcnk7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-ql8v9_openstack-operators(e11be1d4-dbcb-4e6b-a97a-918425cb85ce): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 01 19:48:34 crc kubenswrapper[4888]: E1201 19:48:34.441883 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9" podUID="e11be1d4-dbcb-4e6b-a97a-918425cb85ce" Dec 01 19:48:34 crc kubenswrapper[4888]: E1201 19:48:34.604539 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9" podUID="e11be1d4-dbcb-4e6b-a97a-918425cb85ce" Dec 01 19:48:35 crc kubenswrapper[4888]: E1201 19:48:35.958042 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:35 crc kubenswrapper[4888]: E1201 19:48:35.959447 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:35 crc kubenswrapper[4888]: E1201 19:48:35.959799 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e" cmd=["grpc_health_probe","-addr=:50051"] Dec 01 19:48:35 crc kubenswrapper[4888]: E1201 19:48:35.959886 4888 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-kg5r9" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" Dec 01 19:48:37 crc kubenswrapper[4888]: E1201 19:48:37.138914 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 01 19:48:37 crc kubenswrapper[4888]: E1201 19:48:37.139150 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6nv86,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-z2xkh_openstack-operators(4651cf7c-a7f9-4137-9d3b-6a656746f373): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:37 crc kubenswrapper[4888]: I1201 19:48:37.903764 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.028829 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content\") pod \"73e11a83-0e8b-45c0-b658-2e510650e935\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") "
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.028878 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8qwk\" (UniqueName: \"kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk\") pod \"73e11a83-0e8b-45c0-b658-2e510650e935\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") "
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.028952 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities\") pod \"73e11a83-0e8b-45c0-b658-2e510650e935\" (UID: \"73e11a83-0e8b-45c0-b658-2e510650e935\") "
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.029952 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities" (OuterVolumeSpecName: "utilities") pod "73e11a83-0e8b-45c0-b658-2e510650e935" (UID: "73e11a83-0e8b-45c0-b658-2e510650e935"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.035268 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk" (OuterVolumeSpecName: "kube-api-access-b8qwk") pod "73e11a83-0e8b-45c0-b658-2e510650e935" (UID: "73e11a83-0e8b-45c0-b658-2e510650e935"). InnerVolumeSpecName "kube-api-access-b8qwk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.080601 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73e11a83-0e8b-45c0-b658-2e510650e935" (UID: "73e11a83-0e8b-45c0-b658-2e510650e935"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.144835 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.144913 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8qwk\" (UniqueName: \"kubernetes.io/projected/73e11a83-0e8b-45c0-b658-2e510650e935-kube-api-access-b8qwk\") on node \"crc\" DevicePath \"\""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.144928 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73e11a83-0e8b-45c0-b658-2e510650e935-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.532442 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv"]
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.543345 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp"]
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.629048 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"]
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.642469 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kg5r9" event={"ID":"73e11a83-0e8b-45c0-b658-2e510650e935","Type":"ContainerDied","Data":"9f19fdd29749b6a9bba68e91f99cc8a13d2b8e2462f4787d43a5339289839988"}
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.642506 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kg5r9"
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.642534 4888 scope.go:117] "RemoveContainer" containerID="651ac3c093c18c3bfaeeb367e4384a19c4248fa59e48a5c1e3bdbb8dc17e932e"
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.666306 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:38 crc kubenswrapper[4888]: I1201 19:48:38.671057 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kg5r9"]
Dec 01 19:48:38 crc kubenswrapper[4888]: W1201 19:48:38.786727 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6cb92420_4e6c_4407_9a54_93f003d1c5e9.slice/crio-35af32e07bedef9345e29d2a2925369280d5c5cf31ff8e42993dda870faf3f42 WatchSource:0}: Error finding container 35af32e07bedef9345e29d2a2925369280d5c5cf31ff8e42993dda870faf3f42: Status 404 returned error can't find the container with id 35af32e07bedef9345e29d2a2925369280d5c5cf31ff8e42993dda870faf3f42
Dec 01 19:48:38 crc kubenswrapper[4888]: W1201 19:48:38.813723 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb01f5340_ffdb_4963_9e49_47dad6f75642.slice/crio-ffb6d5b3e9f67db0494d14fbc551a36544e98c45205e09aa40f43748d814eecb WatchSource:0}: Error finding container ffb6d5b3e9f67db0494d14fbc551a36544e98c45205e09aa40f43748d814eecb: Status 404 returned error can't find the container with id ffb6d5b3e9f67db0494d14fbc551a36544e98c45205e09aa40f43748d814eecb
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.049541 4888 scope.go:117] "RemoveContainer" containerID="eab9582f31ae30913c841b2bc2d531656d1d952df0f82f8eadbf54948e1660ce"
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.565157 4888 scope.go:117] "RemoveContainer" containerID="27f70d29850344200e196fb064030b1f57bdcb8c35851cc3b172804c099d19e0"
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.652529 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" event={"ID":"b50af81b-6773-46f1-916e-0346848ba65e","Type":"ContainerStarted","Data":"48eab52b246762618d2b7974e145aa5decf3b09a4a0d3e192e7d70670e4c9e18"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.655335 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" event={"ID":"6cb92420-4e6c-4407-9a54-93f003d1c5e9","Type":"ContainerStarted","Data":"35af32e07bedef9345e29d2a2925369280d5c5cf31ff8e42993dda870faf3f42"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.657071 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" event={"ID":"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2","Type":"ContainerStarted","Data":"91da520c79d5545fdd92bdae1acaaf5b7332fe39926d063d52d5fe36c05749fc"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.657798 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" event={"ID":"06a8d696-66ab-49ef-b858-2245cc6e0023","Type":"ContainerStarted","Data":"803058d58413cc0359024dfd76d34e556f5ca8a02d5499bf2c601f4989975787"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.658797 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" event={"ID":"516e9598-68a9-431a-84af-725e3a053e66","Type":"ContainerStarted","Data":"635c0c8e30d698b0c2c4bac67fa2f8e3eb507603ae891d14ad6ea98840605032"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.660213 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" event={"ID":"a363b1d3-f519-41df-bdf8-e80b83edab4d","Type":"ContainerStarted","Data":"c96af62d47c12f50cbc593d3becf0ac8516112e585784c71651dd65748332b67"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.661297 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" event={"ID":"1b3586db-3f49-4ee4-aed0-5e4d469fad92","Type":"ContainerStarted","Data":"7f1ebfe82f189e87beb699acd85d03167a6860b260fcb5f64cb3e66a22aba854"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.663388 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" event={"ID":"a0f5d22c-34c3-40c2-889c-b7900120919c","Type":"ContainerStarted","Data":"e9045315da834b6459db82ac0d6d3e92b4b34d57eb94743d20ae433ea8f40c15"}
Dec 01 19:48:39 crc kubenswrapper[4888]: I1201 19:48:39.666729 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h" event={"ID":"b01f5340-ffdb-4963-9e49-47dad6f75642","Type":"ContainerStarted","Data":"ffb6d5b3e9f67db0494d14fbc551a36544e98c45205e09aa40f43748d814eecb"}
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.476887 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" path="/var/lib/kubelet/pods/73e11a83-0e8b-45c0-b658-2e510650e935/volumes"
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.685920 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" event={"ID":"968d110b-5720-400e-9094-8ec39acb4cf6","Type":"ContainerStarted","Data":"6db0c5b4ec70c797b9ef55da189d3c649fef808eeb2e0d9a287626d481d29f3b"}
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.689301 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h" event={"ID":"b01f5340-ffdb-4963-9e49-47dad6f75642","Type":"ContainerStarted","Data":"fa4152cf2b733fef4dd972fb856a1cbd468dd426a1cef3dc03430a22bfd91bfa"}
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.689473 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.704548 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" event={"ID":"e75f9e52-ceaa-463a-ba65-ed651715c4f4","Type":"ContainerStarted","Data":"4d77eb4acbe87b5bf2fce6a62506cbd8c09de3fba26016c13036e43347a46932"}
Dec 01 19:48:40 crc kubenswrapper[4888]: I1201 19:48:40.875125 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h" podStartSLOduration=31.875094828 podStartE2EDuration="31.875094828s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:48:40.871482507 +0000 UTC m=+920.742512421" watchObservedRunningTime="2025-12-01 19:48:40.875094828 +0000 UTC m=+920.746124732"
Dec 01 19:48:41 crc kubenswrapper[4888]: I1201 19:48:41.753108 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" event={"ID":"1f11c3a5-7276-48d5-9dc1-389ab98ffc11","Type":"ContainerStarted","Data":"254f2fa097c58edea2dc5160dfee22d1b088597d058202d670fce6561b8d942a"}
Dec 01 19:48:44 crc kubenswrapper[4888]: E1201 19:48:44.542163 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" podUID="1529e922-fd00-4f32-878a-d8a322a7b6b7"
Dec 01 19:48:44 crc kubenswrapper[4888]: I1201 19:48:44.858141 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" event={"ID":"1529e922-fd00-4f32-878a-d8a322a7b6b7","Type":"ContainerStarted","Data":"44c0011781db13a4f3487a6a0fb370c4489061d56f7fcb9bd73d860f8b3cb059"}
Dec 01 19:48:44 crc kubenswrapper[4888]: I1201 19:48:44.861222 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" event={"ID":"3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2","Type":"ContainerStarted","Data":"dd9a34f7f0f456066882115e18df2a380d53f025a0d35e04664f924556dd805a"}
Dec 01 19:48:44 crc kubenswrapper[4888]: I1201 19:48:44.861777 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"
Dec 01 19:48:44 crc kubenswrapper[4888]: I1201 19:48:44.863896 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g"
Dec 01 19:48:44 crc kubenswrapper[4888]: I1201 19:48:44.942702 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-8wh4g" podStartSLOduration=4.019401804 podStartE2EDuration="36.942676325s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.084240254 +0000 UTC m=+889.955270168" lastFinishedPulling="2025-12-01 19:48:43.007514765 +0000 UTC m=+922.878544689" observedRunningTime="2025-12-01 19:48:44.935643877 +0000 UTC m=+924.806673791" watchObservedRunningTime="2025-12-01 19:48:44.942676325 +0000 UTC m=+924.813706259"
Dec 01 19:48:45 crc kubenswrapper[4888]: I1201 19:48:45.592630 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6c58f9c549-nbj7h"
Dec 01 19:48:50 crc kubenswrapper[4888]: I1201 19:48:50.094138 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:48:50 crc kubenswrapper[4888]: I1201 19:48:50.100325 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:48:50 crc kubenswrapper[4888]: I1201 19:48:50.100369 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp"
Dec 01 19:48:50 crc kubenswrapper[4888]: I1201 19:48:50.101095 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 19:48:50 crc kubenswrapper[4888]: I1201 19:48:50.101156 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0" gracePeriod=600
Dec 01 19:48:51 crc kubenswrapper[4888]: I1201 19:48:51.114232 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0" exitCode=0
Dec 01 19:48:51 crc kubenswrapper[4888]: I1201 19:48:51.114293 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0"}
Dec 01 19:48:51 crc kubenswrapper[4888]: I1201 19:48:51.114407 4888 scope.go:117] "RemoveContainer" containerID="74307380ea6264d30dfc6c82d5e5a057d7c95f62590ce7836b721c0fe4587a1d"
Dec 01 19:48:53 crc kubenswrapper[4888]: E1201 19:48:53.986830 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81"
Dec 01 19:48:53 crc kubenswrapper[4888]: E1201 19:48:53.989456 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s5t8z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp_openstack-operators(06a8d696-66ab-49ef-b858-2245cc6e0023): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:48:57 crc kubenswrapper[4888]: E1201 19:48:57.884435 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" podUID="7914a20a-7747-446f-a496-deecd734fb83" Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.046982 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" podUID="06a8d696-66ab-49ef-b858-2245cc6e0023" Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.070126 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" podUID="8ad47b0a-b049-45fa-afea-44eb4d5be85f" Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.073763 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
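The "Unhandled Error" record above is the kubelet dumping the entire spec of the container it failed to start: a kubebuilder-style controller manager run as /manager --leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080, leader-election tuning supplied through the LEASE_DURATION=30, RENEW_DEADLINE=20, and RETRY_PERIOD=5 environment variables, a long list of RELATED_IMAGE_*_URL_DEFAULT image defaults, 10m/256Mi requests against 500m/512Mi limits, and /healthz plus /readyz probes on port 8081. A hedged sketch of how such a main() is typically wired with controller-runtime (illustrative only; the LeaderElectionID and the env-var fallback defaults are assumptions, not the operator's actual source):

    package main

    import (
        "os"
        "strconv"
        "time"

        ctrl "sigs.k8s.io/controller-runtime"
        "sigs.k8s.io/controller-runtime/pkg/healthz"
    )

    // envSeconds reads an integer-seconds env var like LEASE_DURATION=30,
    // falling back to an assumed default when unset or malformed.
    func envSeconds(name string, def time.Duration) *time.Duration {
        if v, err := strconv.Atoi(os.Getenv(name)); err == nil {
            d := time.Duration(v) * time.Second
            return &d
        }
        return &def
    }

    func main() {
        mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{
            LeaderElection:         true, // --leader-elect
            LeaderElectionID:       "openstack-baremetal-operator-lock", // illustrative
            LeaseDuration:          envSeconds("LEASE_DURATION", 15*time.Second),
            RenewDeadline:          envSeconds("RENEW_DEADLINE", 10*time.Second),
            RetryPeriod:            envSeconds("RETRY_PERIOD", 2*time.Second),
            HealthProbeBindAddress: ":8081", // --health-probe-bind-address
        })
        if err != nil {
            os.Exit(1)
        }
        // The /healthz and /readyz endpoints probed in the spec above.
        _ = mgr.AddHealthzCheck("healthz", healthz.Ping)
        _ = mgr.AddReadyzCheck("readyz", healthz.Ping)
        _ = mgr.Start(ctrl.SetupSignalHandler())
    }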
for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" podUID="da594cb2-bb6a-4028-a609-68385c474377" Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.168177 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" podUID="4651cf7c-a7f9-4137-9d3b-6a656746f373" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.174935 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9" event={"ID":"e11be1d4-dbcb-4e6b-a97a-918425cb85ce","Type":"ContainerStarted","Data":"d27c254c7fde1dfcbf4b328efb6ad99a170bfcadb27eb861bb1869ecd0db8d11"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.179920 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" event={"ID":"8ad47b0a-b049-45fa-afea-44eb4d5be85f","Type":"ContainerStarted","Data":"212b10f52d96d920828ccd47d2f7745c331592a8b12782ff1a42ad97d261a103"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.182367 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" event={"ID":"da594cb2-bb6a-4028-a609-68385c474377","Type":"ContainerStarted","Data":"6da5c0ae2d28fe18a0face8424c82170972ab44a240b9ca52dd5e9b6cb1319da"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.275463 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-ql8v9" podStartSLOduration=2.993947273 podStartE2EDuration="49.275437302s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.265836302 +0000 UTC m=+891.136866216" lastFinishedPulling="2025-12-01 19:48:57.547326331 +0000 UTC m=+937.418356245" observedRunningTime="2025-12-01 19:48:58.202668139 +0000 UTC m=+938.073698063" watchObservedRunningTime="2025-12-01 19:48:58.275437302 +0000 UTC m=+938.146467216" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.281390 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" event={"ID":"4651cf7c-a7f9-4137-9d3b-6a656746f373","Type":"ContainerStarted","Data":"4b174c57fc2179bcfa40481c37feaa61a6f6c64ceffcfa1df8f03c675de9fddf"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.287634 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" event={"ID":"a0f5d22c-34c3-40c2-889c-b7900120919c","Type":"ContainerStarted","Data":"7b9f3acb2e9c35e151904ed7779f0d5a51f23a84ef2013f66a536ec9d8beb086"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.290558 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.295479 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.314370 4888 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" podUID="9f5355dc-205f-4dca-91cf-39209ca1a7b3" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.317917 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" event={"ID":"b50af81b-6773-46f1-916e-0346848ba65e","Type":"ContainerStarted","Data":"c8665e747272a8123382712fb094a0e3ba1a242dbabf81ae1e87a15e4f290426"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.322830 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.323770 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.326245 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-lbp4z" podStartSLOduration=3.594509533 podStartE2EDuration="50.326040921s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.825172748 +0000 UTC m=+890.696202662" lastFinishedPulling="2025-12-01 19:48:57.556704136 +0000 UTC m=+937.427734050" observedRunningTime="2025-12-01 19:48:58.323563151 +0000 UTC m=+938.194593075" watchObservedRunningTime="2025-12-01 19:48:58.326040921 +0000 UTC m=+938.197070835" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.329379 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" event={"ID":"7914a20a-7747-446f-a496-deecd734fb83","Type":"ContainerStarted","Data":"56e9b84371ee5db19f2c63d3fd42c8c25e48d0a2adacb9b87dd89fefc9b62a4b"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.354203 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" event={"ID":"06a8d696-66ab-49ef-b858-2245cc6e0023","Type":"ContainerStarted","Data":"877e3a84827d5e8b4dbab6d62d4a3d1af07dcefde8724fc8deac72d48eb9b956"} Dec 01 19:48:58 crc kubenswrapper[4888]: E1201 19:48:58.360863 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" podUID="06a8d696-66ab-49ef-b858-2245cc6e0023" Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.375945 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e"} Dec 01 19:48:58 crc kubenswrapper[4888]: I1201 19:48:58.585092 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-cht8z" podStartSLOduration=3.36614724 podStartE2EDuration="49.585072682s" 
podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.335494969 +0000 UTC m=+891.206524883" lastFinishedPulling="2025-12-01 19:48:57.554420421 +0000 UTC m=+937.425450325" observedRunningTime="2025-12-01 19:48:58.481876789 +0000 UTC m=+938.352906713" watchObservedRunningTime="2025-12-01 19:48:58.585072682 +0000 UTC m=+938.456102586" Dec 01 19:48:59 crc kubenswrapper[4888]: E1201 19:48:58.692605 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" podUID="b7f34996-fe75-4c30-9e22-022f644f7c89" Dec 01 19:48:59 crc kubenswrapper[4888]: E1201 19:48:59.260427 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" podUID="ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.396304 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" event={"ID":"968d110b-5720-400e-9094-8ec39acb4cf6","Type":"ContainerStarted","Data":"2e06052601dc3e6349efcd543d26751fc3968b9055d1d4e215671bc1dbcb0df0"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.398123 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.398920 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.400386 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" event={"ID":"b7f34996-fe75-4c30-9e22-022f644f7c89","Type":"ContainerStarted","Data":"d181ec11ad86cf5bb9937b2cd042a410cb699666621c4147353e567280cd8d33"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.407874 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" event={"ID":"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02","Type":"ContainerStarted","Data":"f56e2ec47ef33ead9c5da21464616dc9f250b814ef59e9e9fe097f4b75a9435c"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.425472 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" event={"ID":"e75f9e52-ceaa-463a-ba65-ed651715c4f4","Type":"ContainerStarted","Data":"db56aee3781d44acb89eb1fe733b5ff7d8b9ade06e615ca18f8d9e607d1b0b01"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.427740 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.430315 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.449763 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" event={"ID":"1529e922-fd00-4f32-878a-d8a322a7b6b7","Type":"ContainerStarted","Data":"572866a168b2f78c7db57e54d6dcaccd66ec7f8d6264870209ce32e4c139d5fd"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.450423 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" Dec 01 19:48:59 crc kubenswrapper[4888]: E1201 19:48:59.476494 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" podUID="f94185be-1233-4c97-add2-b6e2fcd22827" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.478841 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" event={"ID":"1f11c3a5-7276-48d5-9dc1-389ab98ffc11","Type":"ContainerStarted","Data":"8127c21642e7ec7f0a805e2788b04a7c463e4ed14d08457432ac49f79e92c8ae"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.479731 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.484576 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.486642 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" event={"ID":"1b3586db-3f49-4ee4-aed0-5e4d469fad92","Type":"ContainerStarted","Data":"a30c6e00ef3e7e2ebabf43764edd34a4d1edbf06552bb8b32ad7d7711621dc32"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.490981 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.494642 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-pxs95" podStartSLOduration=5.252021116 podStartE2EDuration="51.494622405s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.314610601 +0000 UTC m=+891.185640515" lastFinishedPulling="2025-12-01 19:48:57.55721189 +0000 UTC m=+937.428241804" observedRunningTime="2025-12-01 19:48:59.46185467 +0000 UTC m=+939.332884654" watchObservedRunningTime="2025-12-01 19:48:59.494622405 +0000 UTC m=+939.365652319" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.495480 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.495572 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" podStartSLOduration=4.137618268 podStartE2EDuration="51.495566631s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.189255295 +0000 UTC m=+890.060285209" lastFinishedPulling="2025-12-01 19:48:57.547203658 +0000 UTC m=+937.418233572" observedRunningTime="2025-12-01 
19:48:59.490869859 +0000 UTC m=+939.361899773" watchObservedRunningTime="2025-12-01 19:48:59.495566631 +0000 UTC m=+939.366596545" Dec 01 19:48:59 crc kubenswrapper[4888]: E1201 19:48:59.509461 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" podUID="922916d6-2e57-4087-b5ae-24c6318f180a" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.513321 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" event={"ID":"516e9598-68a9-431a-84af-725e3a053e66","Type":"ContainerStarted","Data":"93a0ec631ee3cee4c9054f5920f62c37483d6f24a28066f636a98b15e5e895de"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.514172 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.556671 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.568065 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" event={"ID":"a363b1d3-f519-41df-bdf8-e80b83edab4d","Type":"ContainerStarted","Data":"8dbf27f5fc1f258905c760f4e34c6d5891998031e82b390a5526fe6328deef4b"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.569399 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.573315 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" event={"ID":"9f5355dc-205f-4dca-91cf-39209ca1a7b3","Type":"ContainerStarted","Data":"88eec00fbdb264c9e088a509c0ebce0a41be69f9293629d71b3d94ccae46679c"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.585763 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" event={"ID":"6cb92420-4e6c-4407-9a54-93f003d1c5e9","Type":"ContainerStarted","Data":"7a0b92bf110db462004740b9167e4993f0f247750b6ecbfcb0b4a81f915b1053"} Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.597096 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" Dec 01 19:48:59 crc kubenswrapper[4888]: E1201 19:48:59.597082 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" podUID="06a8d696-66ab-49ef-b858-2245cc6e0023" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.615537 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-g79qf" podStartSLOduration=4.390758151 podStartE2EDuration="50.615517857s" 
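Once a pull fails with ErrImagePull (here "context canceled", i.e. the copy was aborted mid-transfer), subsequent sync attempts are rejected with ImagePullBackOff, as in the openstack-baremetal-operator records above, and the kubelet waits out an exponentially growing delay before retrying; that pull finally completes at 19:49:15 later in this log. A sketch of the backoff pattern (the 10s initial delay and 5m cap match the kubelet's documented defaults to the best of my knowledge; treat them, and all names here, as assumptions):

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // pullWithBackoff retries pull() with exponential backoff, the pattern
    // behind the ErrImagePull -> ImagePullBackOff -> retry sequence above.
    // Note: this demo sleeps in real time.
    func pullWithBackoff(image string, pull func(string) error) error {
        delay := 10 * time.Second // assumed initial backoff
        const maxDelay = 5 * time.Minute
        for attempt := 1; ; attempt++ {
            if err := pull(image); err == nil {
                return nil
            } else {
                fmt.Printf("ErrImagePull (attempt %d): %v; back-off %s pulling image %q\n",
                    attempt, err, delay, image)
            }
            time.Sleep(delay)
            if delay *= 2; delay > maxDelay {
                delay = maxDelay
            }
        }
    }

    func main() {
        attempts := 0
        img := "quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81"
        err := pullWithBackoff(img, func(string) error {
            if attempts++; attempts < 3 {
                return errors.New("rpc error: code = Canceled desc = copying config: context canceled")
            }
            return nil // a later attempt succeeds, as the pull eventually does above
        })
        fmt.Println("done:", err)
    }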
podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.335760127 +0000 UTC m=+891.206790041" lastFinishedPulling="2025-12-01 19:48:57.560519823 +0000 UTC m=+937.431549747" observedRunningTime="2025-12-01 19:48:59.59650585 +0000 UTC m=+939.467535774" watchObservedRunningTime="2025-12-01 19:48:59.615517857 +0000 UTC m=+939.486547771" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.908501 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-f58dj" podStartSLOduration=4.346802588 podStartE2EDuration="51.908480165s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.000101442 +0000 UTC m=+889.871131376" lastFinishedPulling="2025-12-01 19:48:57.561779039 +0000 UTC m=+937.432808953" observedRunningTime="2025-12-01 19:48:59.901964051 +0000 UTC m=+939.772993975" watchObservedRunningTime="2025-12-01 19:48:59.908480165 +0000 UTC m=+939.779510079" Dec 01 19:48:59 crc kubenswrapper[4888]: I1201 19:48:59.939519 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gqwgf" podStartSLOduration=4.461404953 podStartE2EDuration="51.939503631s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.088928519 +0000 UTC m=+889.959958433" lastFinishedPulling="2025-12-01 19:48:57.567027187 +0000 UTC m=+937.438057111" observedRunningTime="2025-12-01 19:48:59.932635147 +0000 UTC m=+939.803665081" watchObservedRunningTime="2025-12-01 19:48:59.939503631 +0000 UTC m=+939.810533545" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.011237 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-sszrn" podStartSLOduration=5.259452288 podStartE2EDuration="52.011217665s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.806594585 +0000 UTC m=+890.677624499" lastFinishedPulling="2025-12-01 19:48:57.558359952 +0000 UTC m=+937.429389876" observedRunningTime="2025-12-01 19:49:00.009778254 +0000 UTC m=+939.880808168" watchObservedRunningTime="2025-12-01 19:49:00.011217665 +0000 UTC m=+939.882247579" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.056784 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-f6vhb" podStartSLOduration=4.787471797 podStartE2EDuration="51.056766201s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.290132709 +0000 UTC m=+891.161162633" lastFinishedPulling="2025-12-01 19:48:57.559427133 +0000 UTC m=+937.430457037" observedRunningTime="2025-12-01 19:49:00.03548538 +0000 UTC m=+939.906515294" watchObservedRunningTime="2025-12-01 19:49:00.056766201 +0000 UTC m=+939.927796115" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.704592 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" event={"ID":"922916d6-2e57-4087-b5ae-24c6318f180a","Type":"ContainerStarted","Data":"4a973d306be017dcdda4c3d7b53556600c808191c84ee8948a1bce9857b4ee9f"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.722536 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" 
event={"ID":"4651cf7c-a7f9-4137-9d3b-6a656746f373","Type":"ContainerStarted","Data":"bc73026d9f798f7feddf01974b180443054a451e6a608507da30a2bee877f7f1"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.722576 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.749444 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" event={"ID":"b7f34996-fe75-4c30-9e22-022f644f7c89","Type":"ContainerStarted","Data":"29db55770179e15c25625aa49b898a7bbeb1acae2624fc40943735076754137b"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.749867 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.758491 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" event={"ID":"f94185be-1233-4c97-add2-b6e2fcd22827","Type":"ContainerStarted","Data":"d782166241374eb7a3ae224d0a2927dd0c2393e832c1440418f91ded147d538b"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.762825 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" event={"ID":"8ad47b0a-b049-45fa-afea-44eb4d5be85f","Type":"ContainerStarted","Data":"2bd152dee861ee019d7e0e3ddee3ad11cd4c8faf55bc676589b0bcdef65b84e6"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.763261 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.765384 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" event={"ID":"6cb92420-4e6c-4407-9a54-93f003d1c5e9","Type":"ContainerStarted","Data":"07a9daae75de22464136cdaad2293a9080d2ceafa57f9338983fff47a95f1ce4"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.765740 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.776501 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" event={"ID":"7914a20a-7747-446f-a496-deecd734fb83","Type":"ContainerStarted","Data":"1c3705dcdb48c98bf0847f6724fb824993af2112a3b13a467f992910537d7dc2"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.776741 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.775142 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" podStartSLOduration=4.893129318 podStartE2EDuration="52.774356715s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.896356789 +0000 UTC m=+890.767386703" lastFinishedPulling="2025-12-01 19:48:58.777584186 +0000 UTC m=+938.648614100" observedRunningTime="2025-12-01 19:49:00.772818951 +0000 UTC m=+940.643848865" 
watchObservedRunningTime="2025-12-01 19:49:00.774356715 +0000 UTC m=+940.645386639" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.780143 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" event={"ID":"da594cb2-bb6a-4028-a609-68385c474377","Type":"ContainerStarted","Data":"4ff3a9fa67818b1296c609f1b0363e96046a2e9be2dc6863118b7ab89868e48d"} Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.780175 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.800377 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" podStartSLOduration=4.89408761 podStartE2EDuration="52.800358069s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.288713638 +0000 UTC m=+891.159743552" lastFinishedPulling="2025-12-01 19:48:59.194984107 +0000 UTC m=+939.066014011" observedRunningTime="2025-12-01 19:49:00.798234549 +0000 UTC m=+940.669264463" watchObservedRunningTime="2025-12-01 19:49:00.800358069 +0000 UTC m=+940.671387983" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.820952 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" podStartSLOduration=34.062488612 podStartE2EDuration="52.82093593s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:38.788899344 +0000 UTC m=+918.659929258" lastFinishedPulling="2025-12-01 19:48:57.547346662 +0000 UTC m=+937.418376576" observedRunningTime="2025-12-01 19:49:00.817546724 +0000 UTC m=+940.688576648" watchObservedRunningTime="2025-12-01 19:49:00.82093593 +0000 UTC m=+940.691965844" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.871396 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" podStartSLOduration=3.171626993 podStartE2EDuration="52.871377833s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.346606527 +0000 UTC m=+890.217636441" lastFinishedPulling="2025-12-01 19:49:00.046357367 +0000 UTC m=+939.917387281" observedRunningTime="2025-12-01 19:49:00.865118327 +0000 UTC m=+940.736148241" watchObservedRunningTime="2025-12-01 19:49:00.871377833 +0000 UTC m=+940.742407737" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.892728 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" podStartSLOduration=3.571098081 podStartE2EDuration="51.892705575s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.106303788 +0000 UTC m=+890.977333702" lastFinishedPulling="2025-12-01 19:48:59.427911282 +0000 UTC m=+939.298941196" observedRunningTime="2025-12-01 19:49:00.881711085 +0000 UTC m=+940.752741019" watchObservedRunningTime="2025-12-01 19:49:00.892705575 +0000 UTC m=+940.763735489" Dec 01 19:49:00 crc kubenswrapper[4888]: I1201 19:49:00.941545 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" podStartSLOduration=4.554709288 podStartE2EDuration="52.941516743s" 
podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.811919118 +0000 UTC m=+890.682949032" lastFinishedPulling="2025-12-01 19:48:59.198726573 +0000 UTC m=+939.069756487" observedRunningTime="2025-12-01 19:49:00.934362431 +0000 UTC m=+940.805392345" watchObservedRunningTime="2025-12-01 19:49:00.941516743 +0000 UTC m=+940.812546647" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.790138 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" event={"ID":"f94185be-1233-4c97-add2-b6e2fcd22827","Type":"ContainerStarted","Data":"82e193eaef155504a34c6d0ab05ed15095ce16209cbf2787210871e3933eb517"} Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.790644 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.794081 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" event={"ID":"ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02","Type":"ContainerStarted","Data":"d8736681aeb72104a65511a29f297419424940d050334102b3a8be92b8daace3"} Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.795148 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.797700 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" event={"ID":"9f5355dc-205f-4dca-91cf-39209ca1a7b3","Type":"ContainerStarted","Data":"67b8bfc77889f55f742acac63f0093233644e2f439fcdea95bb362ce548b2c86"} Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.797931 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.837643 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" event={"ID":"922916d6-2e57-4087-b5ae-24c6318f180a","Type":"ContainerStarted","Data":"319b553a71f6735d6d30ca3e398cab808a562173cdc56f7ea1d2d10652c2f9c1"} Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.837747 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.881545 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" podStartSLOduration=2.897069298 podStartE2EDuration="52.881516695s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:11.241531465 +0000 UTC m=+891.112561379" lastFinishedPulling="2025-12-01 19:49:01.225978862 +0000 UTC m=+941.097008776" observedRunningTime="2025-12-01 19:49:01.866957324 +0000 UTC m=+941.737987238" watchObservedRunningTime="2025-12-01 19:49:01.881516695 +0000 UTC m=+941.752546609" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.906643 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" podStartSLOduration=3.393229215 podStartE2EDuration="53.906613273s" 
podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.925436822 +0000 UTC m=+890.796466736" lastFinishedPulling="2025-12-01 19:49:01.43882088 +0000 UTC m=+941.309850794" observedRunningTime="2025-12-01 19:49:01.89196863 +0000 UTC m=+941.762998544" watchObservedRunningTime="2025-12-01 19:49:01.906613273 +0000 UTC m=+941.777643197" Dec 01 19:49:01 crc kubenswrapper[4888]: I1201 19:49:01.920109 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" podStartSLOduration=3.551709749 podStartE2EDuration="52.920087734s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.934102801 +0000 UTC m=+890.805132715" lastFinishedPulling="2025-12-01 19:49:00.302480786 +0000 UTC m=+940.173510700" observedRunningTime="2025-12-01 19:49:01.918790197 +0000 UTC m=+941.789820111" watchObservedRunningTime="2025-12-01 19:49:01.920087734 +0000 UTC m=+941.791117648" Dec 01 19:49:02 crc kubenswrapper[4888]: I1201 19:49:02.228444 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" podStartSLOduration=4.856183501 podStartE2EDuration="54.228430667s" podCreationTimestamp="2025-12-01 19:48:08 +0000 UTC" firstStartedPulling="2025-12-01 19:48:10.928506151 +0000 UTC m=+890.799536065" lastFinishedPulling="2025-12-01 19:49:00.300753317 +0000 UTC m=+940.171783231" observedRunningTime="2025-12-01 19:49:02.226496462 +0000 UTC m=+942.097526376" watchObservedRunningTime="2025-12-01 19:49:02.228430667 +0000 UTC m=+942.099460581" Dec 01 19:49:05 crc kubenswrapper[4888]: I1201 19:49:05.173527 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bjgvv" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.281480 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-5mfth" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.333811 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-kjwft" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.336068 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-446zc" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.446939 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-jszb6" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.523369 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-tcnh9" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.535757 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-z2xkh" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.589638 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-psjhd" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.737156 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-v2mfg" Dec 01 19:49:09 crc kubenswrapper[4888]: I1201 19:49:09.792106 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-d4tzz" Dec 01 19:49:10 crc kubenswrapper[4888]: I1201 19:49:10.194740 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-ntzlv" Dec 01 19:49:15 crc kubenswrapper[4888]: I1201 19:49:15.969161 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" event={"ID":"06a8d696-66ab-49ef-b858-2245cc6e0023","Type":"ContainerStarted","Data":"9a3de97379562e897ebe0cfe2d321a246e5f5b469fa4ef2e14fb72a360a10637"} Dec 01 19:49:15 crc kubenswrapper[4888]: I1201 19:49:15.970454 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:49:15 crc kubenswrapper[4888]: I1201 19:49:15.999812 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" podStartSLOduration=30.713867442 podStartE2EDuration="1m6.999786977s" podCreationTimestamp="2025-12-01 19:48:09 +0000 UTC" firstStartedPulling="2025-12-01 19:48:38.795803929 +0000 UTC m=+918.666833843" lastFinishedPulling="2025-12-01 19:49:15.081723454 +0000 UTC m=+954.952753378" observedRunningTime="2025-12-01 19:49:15.99209341 +0000 UTC m=+955.863123334" watchObservedRunningTime="2025-12-01 19:49:15.999786977 +0000 UTC m=+955.870816901" Dec 01 19:49:25 crc kubenswrapper[4888]: I1201 19:49:25.622820 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.539634 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:49:40 crc kubenswrapper[4888]: E1201 19:49:40.541011 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.541037 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" Dec 01 19:49:40 crc kubenswrapper[4888]: E1201 19:49:40.541073 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="extract-utilities" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.541079 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="extract-utilities" Dec 01 19:49:40 crc kubenswrapper[4888]: E1201 19:49:40.541091 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="extract-content" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.541098 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="extract-content" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.541248 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="73e11a83-0e8b-45c0-b658-2e510650e935" containerName="registry-server" Dec 01 
19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.542622 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.547522 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.547802 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-8vtth" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.547935 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.548116 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.553792 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.591871 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.591979 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7xtw\" (UniqueName: \"kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.621619 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.623148 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.628890 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.648149 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.693128 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.693207 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.693227 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.693270 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7xtw\" (UniqueName: \"kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.693300 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfckq\" (UniqueName: \"kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.694152 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.723733 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7xtw\" (UniqueName: \"kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw\") pod \"dnsmasq-dns-675f4bcbfc-xkq8t\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.795000 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 
19:49:40.795442 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfckq\" (UniqueName: \"kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.795557 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.796601 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.796612 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.816220 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfckq\" (UniqueName: \"kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq\") pod \"dnsmasq-dns-78dd6ddcc-v7tfn\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.865754 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:49:40 crc kubenswrapper[4888]: I1201 19:49:40.942602 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:49:41 crc kubenswrapper[4888]: I1201 19:49:41.253656 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:49:41 crc kubenswrapper[4888]: I1201 19:49:41.266402 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:49:41 crc kubenswrapper[4888]: I1201 19:49:41.363011 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:49:41 crc kubenswrapper[4888]: W1201 19:49:41.369402 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5527fcdc_e61a_47d8_b2e2_a1e25696907f.slice/crio-84cb4e937770851a2b84a9db4c0deafc52812c184b2897e29050e7d76a250ca3 WatchSource:0}: Error finding container 84cb4e937770851a2b84a9db4c0deafc52812c184b2897e29050e7d76a250ca3: Status 404 returned error can't find the container with id 84cb4e937770851a2b84a9db4c0deafc52812c184b2897e29050e7d76a250ca3 Dec 01 19:49:42 crc kubenswrapper[4888]: I1201 19:49:42.142965 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" event={"ID":"5527fcdc-e61a-47d8-b2e2-a1e25696907f","Type":"ContainerStarted","Data":"84cb4e937770851a2b84a9db4c0deafc52812c184b2897e29050e7d76a250ca3"} Dec 01 19:49:42 crc kubenswrapper[4888]: I1201 19:49:42.144566 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" event={"ID":"f99a272a-3511-41a9-aa81-053890b4514f","Type":"ContainerStarted","Data":"a199c234c896bcc5573749e942f01f9d2b9cb73d1f2effefe9432ca4a02813bb"} Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.748242 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.767360 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.768917 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.790343 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.872630 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.872920 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.872963 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xtk2\" (UniqueName: \"kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.974106 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.974164 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xtk2\" (UniqueName: \"kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.974234 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.975743 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:43 crc kubenswrapper[4888]: I1201 19:49:43.976020 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.001278 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xtk2\" (UniqueName: 
\"kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2\") pod \"dnsmasq-dns-666b6646f7-wt4mk\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.111500 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.163440 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.195704 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.197374 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.216041 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.278530 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.278605 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrlhm\" (UniqueName: \"kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.278657 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.379545 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.379589 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrlhm\" (UniqueName: \"kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.379613 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.380560 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.380578 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.400770 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrlhm\" (UniqueName: \"kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm\") pod \"dnsmasq-dns-57d769cc4f-b4qkl\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.543230 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.738944 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.983532 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.986708 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.992632 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.992773 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.992944 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.993013 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.993154 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.993413 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.995966 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-tpks7" Dec 01 19:49:44 crc kubenswrapper[4888]: I1201 19:49:44.997202 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.098093 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.138698 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " 
pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.138944 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139067 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139141 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nrzz\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139227 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139299 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139327 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139361 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139732 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.139798 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 
19:49:45.140001 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.184287 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" event={"ID":"15ce5608-5fe1-4873-a3c6-cc8223a60705","Type":"ContainerStarted","Data":"3af0a6bd630ee2cff3e521a0c724281484ccdab7b7497545e22de8a49b589b35"} Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.187341 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" event={"ID":"ab44723f-b49a-495c-b034-5ca0c17b1e3d","Type":"ContainerStarted","Data":"f5d630da4fe9380d109a271163619c20b364524888c4e8fad466e5b19e8fae93"} Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.241789 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.241841 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.241882 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.242310 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.242364 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.242390 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nrzz\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.242969 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 
crc kubenswrapper[4888]: I1201 19:49:45.244825 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.245741 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.246645 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.246695 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.247921 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.246717 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.249881 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.249977 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.250012 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.250418 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.252053 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.254684 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.257677 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.259796 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.261877 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nrzz\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.263642 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.306680 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.307942 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.310324 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.310570 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.310790 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.310889 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.312173 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.312270 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-l5lzk" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.320807 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.329736 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.352217 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453324 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mhkk\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453852 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453888 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453914 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453936 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.453975 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.454000 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.454033 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.454089 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.454129 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.454155 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555352 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555415 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555489 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555552 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555584 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555607 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555660 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mhkk\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555775 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555795 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.555815 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.560850 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.562019 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.562245 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.562348 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.565737 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.565785 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.568330 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.572812 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.574915 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.583655 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.588825 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mhkk\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.624163 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:45 crc kubenswrapper[4888]: I1201 19:49:45.633109 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.144680 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:49:46 crc kubenswrapper[4888]: W1201 19:49:46.169516 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf46b2389_73b7_4b69_a316_ab9e17fc8d1f.slice/crio-c2de545a47b4fd07c5db1312457674639b43060ee2db7b9ea6d745032d28c473 WatchSource:0}: Error finding container c2de545a47b4fd07c5db1312457674639b43060ee2db7b9ea6d745032d28c473: Status 404 returned error can't find the container with id c2de545a47b4fd07c5db1312457674639b43060ee2db7b9ea6d745032d28c473 Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.212426 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerStarted","Data":"c2de545a47b4fd07c5db1312457674639b43060ee2db7b9ea6d745032d28c473"} Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.728204 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.729649 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.735095 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-chzsc" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.735238 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.735273 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.735297 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.744875 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.757021 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 19:49:46 crc kubenswrapper[4888]: W1201 19:49:46.780881 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf994d099_faac_4c30_8cab_e6ef9b8772cd.slice/crio-79229edc17ab7298a11cb3a77eecbb93a14e4aa81c24064f3a33e3e1da6d91f4 WatchSource:0}: Error finding container 79229edc17ab7298a11cb3a77eecbb93a14e4aa81c24064f3a33e3e1da6d91f4: Status 404 returned error can't find the container with id 79229edc17ab7298a11cb3a77eecbb93a14e4aa81c24064f3a33e3e1da6d91f4 Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.786393 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910720 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910770 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910788 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910817 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrs4k\" (UniqueName: \"kubernetes.io/projected/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kube-api-access-mrs4k\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910850 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910873 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kolla-config\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910892 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:46 crc kubenswrapper[4888]: I1201 19:49:46.910919 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-default\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.012667 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.012999 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013027 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013088 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrs4k\" (UniqueName: \"kubernetes.io/projected/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kube-api-access-mrs4k\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013135 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013165 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kolla-config\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " 
pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013206 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013251 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-default\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.013816 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.014522 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kolla-config\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.014830 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.015544 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-config-data-default\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.015938 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bca8bbd3-bfa6-4767-8196-e085c6160a7f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.027631 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.039150 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrs4k\" (UniqueName: \"kubernetes.io/projected/bca8bbd3-bfa6-4767-8196-e085c6160a7f-kube-api-access-mrs4k\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.055023 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bca8bbd3-bfa6-4767-8196-e085c6160a7f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.075632 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bca8bbd3-bfa6-4767-8196-e085c6160a7f\") " pod="openstack/openstack-galera-0" Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.224260 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerStarted","Data":"79229edc17ab7298a11cb3a77eecbb93a14e4aa81c24064f3a33e3e1da6d91f4"} Dec 01 19:49:47 crc kubenswrapper[4888]: I1201 19:49:47.370528 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.020769 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.022130 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.036450 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.036937 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-v22kv" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.037103 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.037230 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.037353 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219121 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219165 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219215 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219250 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlj4r\" (UniqueName: \"kubernetes.io/projected/879f6704-c02e-420a-90ec-23bfb1ce35fe-kube-api-access-mlj4r\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219298 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219319 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219352 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.219383 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428173 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428534 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428560 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428581 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428605 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.428638 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlj4r\" (UniqueName: \"kubernetes.io/projected/879f6704-c02e-420a-90ec-23bfb1ce35fe-kube-api-access-mlj4r\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.429637 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.429670 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.430242 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.430237 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.430887 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.434008 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.448546 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.455215 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/879f6704-c02e-420a-90ec-23bfb1ce35fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.672589 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/879f6704-c02e-420a-90ec-23bfb1ce35fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.696851 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.738857 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlj4r\" (UniqueName: \"kubernetes.io/projected/879f6704-c02e-420a-90ec-23bfb1ce35fe-kube-api-access-mlj4r\") pod \"openstack-cell1-galera-0\" (UID: \"879f6704-c02e-420a-90ec-23bfb1ce35fe\") " pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.833557 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.834601 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.834704 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.839691 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-c8k6s" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.839901 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.840033 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.888207 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-config-data\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.888279 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.888357 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq9wp\" (UniqueName: \"kubernetes.io/projected/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kube-api-access-dq9wp\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.888385 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.888426 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kolla-config\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.973211 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.990347 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq9wp\" (UniqueName: \"kubernetes.io/projected/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kube-api-access-dq9wp\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.990407 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.990469 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kolla-config\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.990527 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-config-data\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.990564 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.994400 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-memcached-tls-certs\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.994842 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-combined-ca-bundle\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.995550 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kolla-config\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:48 crc kubenswrapper[4888]: I1201 19:49:48.996036 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-config-data\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:49 crc kubenswrapper[4888]: I1201 19:49:49.016244 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq9wp\" (UniqueName: \"kubernetes.io/projected/0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac-kube-api-access-dq9wp\") pod \"memcached-0\" (UID: \"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac\") " pod="openstack/memcached-0" Dec 01 19:49:49 crc kubenswrapper[4888]: I1201 19:49:49.183533 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.676791 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.703830 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.707734 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-7952c" Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.719683 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.804632 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnptg\" (UniqueName: \"kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg\") pod \"kube-state-metrics-0\" (UID: \"b148905b-79c9-4889-bf95-4727a495f95a\") " pod="openstack/kube-state-metrics-0" Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.906389 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnptg\" (UniqueName: \"kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg\") pod \"kube-state-metrics-0\" (UID: \"b148905b-79c9-4889-bf95-4727a495f95a\") " pod="openstack/kube-state-metrics-0" Dec 01 19:49:50 crc kubenswrapper[4888]: I1201 19:49:50.954587 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnptg\" (UniqueName: \"kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg\") pod \"kube-state-metrics-0\" (UID: \"b148905b-79c9-4889-bf95-4727a495f95a\") " pod="openstack/kube-state-metrics-0" Dec 01 19:49:51 crc kubenswrapper[4888]: I1201 19:49:51.137693 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.148468 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-b4v8q"] Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.150043 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.152926 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b4v8q"] Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.163582 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-gvxx7" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.165197 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.165364 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323413 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323464 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/644ca96f-aee4-40b9-957b-b18e28634a66-scripts\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323495 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-combined-ca-bundle\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323532 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-log-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323549 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-ovn-controller-tls-certs\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323582 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx8l9\" (UniqueName: \"kubernetes.io/projected/644ca96f-aee4-40b9-957b-b18e28634a66-kube-api-access-lx8l9\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.323657 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.425543 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx8l9\" (UniqueName: \"kubernetes.io/projected/644ca96f-aee4-40b9-957b-b18e28634a66-kube-api-access-lx8l9\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426102 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426219 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426245 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/644ca96f-aee4-40b9-957b-b18e28634a66-scripts\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426265 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-combined-ca-bundle\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426299 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-log-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.426343 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-ovn-controller-tls-certs\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.430024 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.430048 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-log-ovn\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.433474 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/644ca96f-aee4-40b9-957b-b18e28634a66-scripts\") pod \"ovn-controller-b4v8q\" (UID: 
\"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.433713 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/644ca96f-aee4-40b9-957b-b18e28634a66-var-run\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.441008 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-combined-ca-bundle\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.446060 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/644ca96f-aee4-40b9-957b-b18e28634a66-ovn-controller-tls-certs\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.447829 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx8l9\" (UniqueName: \"kubernetes.io/projected/644ca96f-aee4-40b9-957b-b18e28634a66-kube-api-access-lx8l9\") pod \"ovn-controller-b4v8q\" (UID: \"644ca96f-aee4-40b9-957b-b18e28634a66\") " pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.490456 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-b4v8q" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.616107 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-9rcwl"] Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.618036 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.635661 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9rcwl"] Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730058 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-etc-ovs\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730122 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7030c3c7-8abe-4d3f-9279-a90d581f551b-scripts\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730159 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-run\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730225 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qlrv\" (UniqueName: \"kubernetes.io/projected/7030c3c7-8abe-4d3f-9279-a90d581f551b-kube-api-access-5qlrv\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730303 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-lib\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.730377 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-log\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832169 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-etc-ovs\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832255 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7030c3c7-8abe-4d3f-9279-a90d581f551b-scripts\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832311 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-run\") pod 
\"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832348 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qlrv\" (UniqueName: \"kubernetes.io/projected/7030c3c7-8abe-4d3f-9279-a90d581f551b-kube-api-access-5qlrv\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832385 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-lib\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832438 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-log\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832581 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-run\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832714 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-log\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832721 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-etc-ovs\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.832884 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/7030c3c7-8abe-4d3f-9279-a90d581f551b-var-lib\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.836298 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7030c3c7-8abe-4d3f-9279-a90d581f551b-scripts\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.868319 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qlrv\" (UniqueName: \"kubernetes.io/projected/7030c3c7-8abe-4d3f-9279-a90d581f551b-kube-api-access-5qlrv\") pod \"ovn-controller-ovs-9rcwl\" (UID: \"7030c3c7-8abe-4d3f-9279-a90d581f551b\") " pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:53 crc kubenswrapper[4888]: I1201 19:49:53.968629 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.356794 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.358589 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.360576 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.361155 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.366458 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.366669 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-v2x9f" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.367255 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.385747 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.485742 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.485784 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gzf7\" (UniqueName: \"kubernetes.io/projected/b3edb1da-ac59-4264-833a-499b13fb5071-kube-api-access-4gzf7\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.485814 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.485834 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.486013 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.486226 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-config\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.486257 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.486287 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625649 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625697 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gzf7\" (UniqueName: \"kubernetes.io/projected/b3edb1da-ac59-4264-833a-499b13fb5071-kube-api-access-4gzf7\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625726 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625750 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625796 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625871 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-config\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.625891 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: 
I1201 19:49:56.625917 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.626942 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.627430 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.627599 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-config\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.628418 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3edb1da-ac59-4264-833a-499b13fb5071-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.631075 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.635069 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.642144 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3edb1da-ac59-4264-833a-499b13fb5071-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.681966 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gzf7\" (UniqueName: \"kubernetes.io/projected/b3edb1da-ac59-4264-833a-499b13fb5071-kube-api-access-4gzf7\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.686352 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b3edb1da-ac59-4264-833a-499b13fb5071\") " 
pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:56 crc kubenswrapper[4888]: I1201 19:49:56.987845 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.368595 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.369915 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.376346 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.376359 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-cj689" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.377975 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.382298 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.390045 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.437642 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.437869 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vgl5\" (UniqueName: \"kubernetes.io/projected/b1e54c26-f189-448c-be1f-57d58fcd50bf-kube-api-access-9vgl5\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.437996 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.438112 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.438246 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.438345 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.438525 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-config\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.438563 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.545416 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546131 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546291 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546401 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vgl5\" (UniqueName: \"kubernetes.io/projected/b1e54c26-f189-448c-be1f-57d58fcd50bf-kube-api-access-9vgl5\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546527 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546623 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546756 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.546864 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.547070 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-config\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.548210 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-config\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.548474 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.549935 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1e54c26-f189-448c-be1f-57d58fcd50bf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.553490 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.602049 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.602356 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1e54c26-f189-448c-be1f-57d58fcd50bf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.604219 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 crc kubenswrapper[4888]: I1201 19:49:57.609915 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vgl5\" (UniqueName: \"kubernetes.io/projected/b1e54c26-f189-448c-be1f-57d58fcd50bf-kube-api-access-9vgl5\") pod \"ovsdbserver-nb-0\" (UID: \"b1e54c26-f189-448c-be1f-57d58fcd50bf\") " pod="openstack/ovsdbserver-nb-0" Dec 01 19:49:57 
crc kubenswrapper[4888]: I1201 19:49:57.686224 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.069208 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.069831 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2nrzz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(f46b2389-73b7-4b69-a316-ab9e17fc8d1f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.071005 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc 
error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.258449 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.896247 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.896460 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9xtk2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-wt4mk_openstack(ab44723f-b49a-495c-b034-5ca0c17b1e3d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.897720 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" 
podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.898629 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.898762 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f7xtw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-xkq8t_openstack(5527fcdc-e61a-47d8-b2e2-a1e25696907f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:50:09 crc kubenswrapper[4888]: E1201 19:50:09.899926 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" podUID="5527fcdc-e61a-47d8-b2e2-a1e25696907f" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.955354 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.955554 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts 
--keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rrlhm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-b4qkl_openstack(15ce5608-5fe1-4873-a3c6-cc8223a60705): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.956706 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.956773 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.956859 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zfckq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-v7tfn_openstack(f99a272a-3511-41a9-aa81-053890b4514f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:09.958036 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" podUID="f99a272a-3511-41a9-aa81-053890b4514f" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:10.288020 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" Dec 01 19:50:10 crc kubenswrapper[4888]: E1201 19:50:10.288543 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.223634 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.279872 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerStarted","Data":"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"} Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 
19:50:11.281595 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"879f6704-c02e-420a-90ec-23bfb1ce35fe","Type":"ContainerStarted","Data":"f6932e4122a394c67ead2f213b6b745a70e316355d04d2134ea86bc206db562f"} Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.359999 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.472617 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7xtw\" (UniqueName: \"kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw\") pod \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.473085 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config\") pod \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\" (UID: \"5527fcdc-e61a-47d8-b2e2-a1e25696907f\") " Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.474646 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config" (OuterVolumeSpecName: "config") pod "5527fcdc-e61a-47d8-b2e2-a1e25696907f" (UID: "5527fcdc-e61a-47d8-b2e2-a1e25696907f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.478580 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw" (OuterVolumeSpecName: "kube-api-access-f7xtw") pod "5527fcdc-e61a-47d8-b2e2-a1e25696907f" (UID: "5527fcdc-e61a-47d8-b2e2-a1e25696907f"). InnerVolumeSpecName "kube-api-access-f7xtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.561288 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.581613 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7xtw\" (UniqueName: \"kubernetes.io/projected/5527fcdc-e61a-47d8-b2e2-a1e25696907f-kube-api-access-f7xtw\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.581644 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5527fcdc-e61a-47d8-b2e2-a1e25696907f-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.648349 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:50:11 crc kubenswrapper[4888]: W1201 19:50:11.652743 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbca8bbd3_bfa6_4767_8196_e085c6160a7f.slice/crio-85b289a971d89080014a3249513d1b7ce88238cea24d317c5c2443eacd3e447c WatchSource:0}: Error finding container 85b289a971d89080014a3249513d1b7ce88238cea24d317c5c2443eacd3e447c: Status 404 returned error can't find the container with id 85b289a971d89080014a3249513d1b7ce88238cea24d317c5c2443eacd3e447c Dec 01 19:50:11 crc kubenswrapper[4888]: W1201 19:50:11.655480 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod644ca96f_aee4_40b9_957b_b18e28634a66.slice/crio-3f533e9cfeffdaf14367153489d5d172699d95c927de5d923ed3072ef400f32a WatchSource:0}: Error finding container 3f533e9cfeffdaf14367153489d5d172699d95c927de5d923ed3072ef400f32a: Status 404 returned error can't find the container with id 3f533e9cfeffdaf14367153489d5d172699d95c927de5d923ed3072ef400f32a Dec 01 19:50:11 crc kubenswrapper[4888]: W1201 19:50:11.657461 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb148905b_79c9_4889_bf95_4727a495f95a.slice/crio-e3d64e850300a6fdfe43ff103b511d13bd1f3e18c8aa0bb1178025d9aafae341 WatchSource:0}: Error finding container e3d64e850300a6fdfe43ff103b511d13bd1f3e18c8aa0bb1178025d9aafae341: Status 404 returned error can't find the container with id e3d64e850300a6fdfe43ff103b511d13bd1f3e18c8aa0bb1178025d9aafae341 Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.658906 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.665242 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b4v8q"] Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.682592 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc\") pod \"f99a272a-3511-41a9-aa81-053890b4514f\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.682709 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfckq\" (UniqueName: \"kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq\") pod \"f99a272a-3511-41a9-aa81-053890b4514f\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.682759 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config\") pod \"f99a272a-3511-41a9-aa81-053890b4514f\" (UID: \"f99a272a-3511-41a9-aa81-053890b4514f\") " Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.683499 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config" (OuterVolumeSpecName: "config") pod "f99a272a-3511-41a9-aa81-053890b4514f" (UID: "f99a272a-3511-41a9-aa81-053890b4514f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.683529 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f99a272a-3511-41a9-aa81-053890b4514f" (UID: "f99a272a-3511-41a9-aa81-053890b4514f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.686872 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq" (OuterVolumeSpecName: "kube-api-access-zfckq") pod "f99a272a-3511-41a9-aa81-053890b4514f" (UID: "f99a272a-3511-41a9-aa81-053890b4514f"). InnerVolumeSpecName "kube-api-access-zfckq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.702629 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 19:50:11 crc kubenswrapper[4888]: W1201 19:50:11.702805 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e0fe4f5_1c76_4c72_a2cd_aa07f2e4c4ac.slice/crio-8a2f21dbebfe9b825320b00ad6681f8cbe710858a2634e2a0a9e47ceb9f57f7d WatchSource:0}: Error finding container 8a2f21dbebfe9b825320b00ad6681f8cbe710858a2634e2a0a9e47ceb9f57f7d: Status 404 returned error can't find the container with id 8a2f21dbebfe9b825320b00ad6681f8cbe710858a2634e2a0a9e47ceb9f57f7d Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.784241 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.784275 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f99a272a-3511-41a9-aa81-053890b4514f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.784288 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfckq\" (UniqueName: \"kubernetes.io/projected/f99a272a-3511-41a9-aa81-053890b4514f-kube-api-access-zfckq\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.810867 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 19:50:11 crc kubenswrapper[4888]: I1201 19:50:11.900895 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-9rcwl"] Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.293920 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"bca8bbd3-bfa6-4767-8196-e085c6160a7f","Type":"ContainerStarted","Data":"85b289a971d89080014a3249513d1b7ce88238cea24d317c5c2443eacd3e447c"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.295481 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" event={"ID":"f99a272a-3511-41a9-aa81-053890b4514f","Type":"ContainerDied","Data":"a199c234c896bcc5573749e942f01f9d2b9cb73d1f2effefe9432ca4a02813bb"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.295512 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-v7tfn" Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.296635 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b148905b-79c9-4889-bf95-4727a495f95a","Type":"ContainerStarted","Data":"e3d64e850300a6fdfe43ff103b511d13bd1f3e18c8aa0bb1178025d9aafae341"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.298172 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac","Type":"ContainerStarted","Data":"8a2f21dbebfe9b825320b00ad6681f8cbe710858a2634e2a0a9e47ceb9f57f7d"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.299542 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q" event={"ID":"644ca96f-aee4-40b9-957b-b18e28634a66","Type":"ContainerStarted","Data":"3f533e9cfeffdaf14367153489d5d172699d95c927de5d923ed3072ef400f32a"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.300708 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.300897 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-xkq8t" event={"ID":"5527fcdc-e61a-47d8-b2e2-a1e25696907f","Type":"ContainerDied","Data":"84cb4e937770851a2b84a9db4c0deafc52812c184b2897e29050e7d76a250ca3"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.302278 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9rcwl" event={"ID":"7030c3c7-8abe-4d3f-9279-a90d581f551b","Type":"ContainerStarted","Data":"1a91e927782aab2b91606a086598f94ba54f07ec811dbd9eb3c2f6a1452ceced"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.305695 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b1e54c26-f189-448c-be1f-57d58fcd50bf","Type":"ContainerStarted","Data":"326573d2ee969b3e5b31aa49689879d1efb35684fa14555c10025db08d0b91f1"} Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.358533 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.370918 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-v7tfn"] Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.401027 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.420955 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-xkq8t"] Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.464437 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5527fcdc-e61a-47d8-b2e2-a1e25696907f" 
path="/var/lib/kubelet/pods/5527fcdc-e61a-47d8-b2e2-a1e25696907f/volumes" Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.465181 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f99a272a-3511-41a9-aa81-053890b4514f" path="/var/lib/kubelet/pods/f99a272a-3511-41a9-aa81-053890b4514f/volumes" Dec 01 19:50:12 crc kubenswrapper[4888]: I1201 19:50:12.571754 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 19:50:13 crc kubenswrapper[4888]: I1201 19:50:13.319407 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b3edb1da-ac59-4264-833a-499b13fb5071","Type":"ContainerStarted","Data":"b96ff11cd85542beb191fd791209bcc36541c81a17208a9f076ce61337bb549b"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.370960 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b3edb1da-ac59-4264-833a-499b13fb5071","Type":"ContainerStarted","Data":"b53fafddf0ed62952e3c047b2e44d2d1973565c420f6e12e18eb70f9f41006b1"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.377398 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b1e54c26-f189-448c-be1f-57d58fcd50bf","Type":"ContainerStarted","Data":"6d6e37fbefa0210dcd183edb7eb5521bedc73985ea6873afc6abfa3ea5764a5c"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.380569 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bca8bbd3-bfa6-4767-8196-e085c6160a7f","Type":"ContainerStarted","Data":"24ff6af3ed453d098ec932412c08371f9528926893af29fff96a84bd3f1bdf07"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.383149 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"879f6704-c02e-420a-90ec-23bfb1ce35fe","Type":"ContainerStarted","Data":"3f40a92210c0934c6f77826f9c992cfed1dda93e2743e735bfadc023c4155b2c"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.385633 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b148905b-79c9-4889-bf95-4727a495f95a","Type":"ContainerStarted","Data":"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.385781 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.387451 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac","Type":"ContainerStarted","Data":"6c315dd4fe85563b029700435228fff44d57d0e9f3160eb1dd10377892f9fddc"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.387915 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.390117 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q" event={"ID":"644ca96f-aee4-40b9-957b-b18e28634a66","Type":"ContainerStarted","Data":"f2b9750a8e4e66b3e46a670f7c735898996c0c3073c27d1f5cfc37267291dca4"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.390633 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-b4v8q" Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.392074 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-ovs-9rcwl" event={"ID":"7030c3c7-8abe-4d3f-9279-a90d581f551b","Type":"ContainerStarted","Data":"51dbb2b4e6706473b40d03c9701fda9c19f67109c9f18fe7cc74c602929086f0"} Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.440026 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=22.361656580000002 podStartE2EDuration="29.439999269s" podCreationTimestamp="2025-12-01 19:49:50 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.659895823 +0000 UTC m=+1011.530925727" lastFinishedPulling="2025-12-01 19:50:18.738238492 +0000 UTC m=+1018.609268416" observedRunningTime="2025-12-01 19:50:19.439994119 +0000 UTC m=+1019.311024033" watchObservedRunningTime="2025-12-01 19:50:19.439999269 +0000 UTC m=+1019.311029183" Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.465325 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=24.579367536 podStartE2EDuration="31.465294393s" podCreationTimestamp="2025-12-01 19:49:48 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.705536252 +0000 UTC m=+1011.576566166" lastFinishedPulling="2025-12-01 19:50:18.591463109 +0000 UTC m=+1018.462493023" observedRunningTime="2025-12-01 19:50:19.464639655 +0000 UTC m=+1019.335669569" watchObservedRunningTime="2025-12-01 19:50:19.465294393 +0000 UTC m=+1019.336324317" Dec 01 19:50:19 crc kubenswrapper[4888]: I1201 19:50:19.500926 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-b4v8q" podStartSLOduration=20.401936077 podStartE2EDuration="27.500909428s" podCreationTimestamp="2025-12-01 19:49:52 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.657587998 +0000 UTC m=+1011.528617912" lastFinishedPulling="2025-12-01 19:50:18.756561309 +0000 UTC m=+1018.627591263" observedRunningTime="2025-12-01 19:50:19.499091167 +0000 UTC m=+1019.370121081" watchObservedRunningTime="2025-12-01 19:50:19.500909428 +0000 UTC m=+1019.371939342" Dec 01 19:50:20 crc kubenswrapper[4888]: I1201 19:50:20.404866 4888 generic.go:334] "Generic (PLEG): container finished" podID="7030c3c7-8abe-4d3f-9279-a90d581f551b" containerID="51dbb2b4e6706473b40d03c9701fda9c19f67109c9f18fe7cc74c602929086f0" exitCode=0 Dec 01 19:50:20 crc kubenswrapper[4888]: I1201 19:50:20.404962 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9rcwl" event={"ID":"7030c3c7-8abe-4d3f-9279-a90d581f551b","Type":"ContainerDied","Data":"51dbb2b4e6706473b40d03c9701fda9c19f67109c9f18fe7cc74c602929086f0"} Dec 01 19:50:21 crc kubenswrapper[4888]: I1201 19:50:21.414705 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9rcwl" event={"ID":"7030c3c7-8abe-4d3f-9279-a90d581f551b","Type":"ContainerStarted","Data":"ea03c2c58fa4fe6e9803787d215d12716d800c917b87636a0ad5c63736d4acf2"} Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.424079 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-9rcwl" event={"ID":"7030c3c7-8abe-4d3f-9279-a90d581f551b","Type":"ContainerStarted","Data":"2a71dfd22c52d838a2e22fe13bd951ff0dce47b9024114a93d4fec0a49aa8c49"} Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.424799 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.424815 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.426429 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b3edb1da-ac59-4264-833a-499b13fb5071","Type":"ContainerStarted","Data":"36005cd16018363ee5eeae5d7cd88dbaa97b57ac487db6553debbe4248f5e497"} Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.433698 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b1e54c26-f189-448c-be1f-57d58fcd50bf","Type":"ContainerStarted","Data":"e76e0d53fdd15b9cb8d7eeb3b86e8f71e9afd6a82af9e8637728eac30f488a0e"} Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.436070 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerID="6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16" exitCode=0 Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.436105 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" event={"ID":"ab44723f-b49a-495c-b034-5ca0c17b1e3d","Type":"ContainerDied","Data":"6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16"} Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.638379 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-9rcwl" podStartSLOduration=22.798667172000002 podStartE2EDuration="29.638359414s" podCreationTimestamp="2025-12-01 19:49:53 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.909295813 +0000 UTC m=+1011.780325727" lastFinishedPulling="2025-12-01 19:50:18.748988055 +0000 UTC m=+1018.620017969" observedRunningTime="2025-12-01 19:50:22.637972023 +0000 UTC m=+1022.509001957" watchObservedRunningTime="2025-12-01 19:50:22.638359414 +0000 UTC m=+1022.509389328" Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.660023 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=16.53030471 podStartE2EDuration="26.660001114s" podCreationTimestamp="2025-12-01 19:49:56 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.820517407 +0000 UTC m=+1011.691547321" lastFinishedPulling="2025-12-01 19:50:21.950213801 +0000 UTC m=+1021.821243725" observedRunningTime="2025-12-01 19:50:22.655116656 +0000 UTC m=+1022.526146570" watchObservedRunningTime="2025-12-01 19:50:22.660001114 +0000 UTC m=+1022.531031038" Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.686820 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 01 19:50:22 crc kubenswrapper[4888]: I1201 19:50:22.724428 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=18.511234967 podStartE2EDuration="27.724409962s" podCreationTimestamp="2025-12-01 19:49:55 +0000 UTC" firstStartedPulling="2025-12-01 19:50:12.772065065 +0000 UTC m=+1012.643094979" lastFinishedPulling="2025-12-01 19:50:21.98524006 +0000 UTC m=+1021.856269974" observedRunningTime="2025-12-01 19:50:22.720120151 +0000 UTC m=+1022.591150055" watchObservedRunningTime="2025-12-01 19:50:22.724409962 +0000 UTC m=+1022.595439876" Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.469102 4888 generic.go:334] "Generic (PLEG): container finished" podID="bca8bbd3-bfa6-4767-8196-e085c6160a7f" containerID="24ff6af3ed453d098ec932412c08371f9528926893af29fff96a84bd3f1bdf07" exitCode=0 Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 
19:50:23.469205 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bca8bbd3-bfa6-4767-8196-e085c6160a7f","Type":"ContainerDied","Data":"24ff6af3ed453d098ec932412c08371f9528926893af29fff96a84bd3f1bdf07"} Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.472057 4888 generic.go:334] "Generic (PLEG): container finished" podID="879f6704-c02e-420a-90ec-23bfb1ce35fe" containerID="3f40a92210c0934c6f77826f9c992cfed1dda93e2743e735bfadc023c4155b2c" exitCode=0 Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.472137 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"879f6704-c02e-420a-90ec-23bfb1ce35fe","Type":"ContainerDied","Data":"3f40a92210c0934c6f77826f9c992cfed1dda93e2743e735bfadc023c4155b2c"} Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.480302 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" event={"ID":"ab44723f-b49a-495c-b034-5ca0c17b1e3d","Type":"ContainerStarted","Data":"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103"} Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.538891 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" podStartSLOduration=3.333682805 podStartE2EDuration="40.538866901s" podCreationTimestamp="2025-12-01 19:49:43 +0000 UTC" firstStartedPulling="2025-12-01 19:49:44.782074681 +0000 UTC m=+984.653104595" lastFinishedPulling="2025-12-01 19:50:21.987258777 +0000 UTC m=+1021.858288691" observedRunningTime="2025-12-01 19:50:23.537263295 +0000 UTC m=+1023.408293219" watchObservedRunningTime="2025-12-01 19:50:23.538866901 +0000 UTC m=+1023.409896815" Dec 01 19:50:23 crc kubenswrapper[4888]: I1201 19:50:23.988041 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.037703 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.112860 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.185111 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.488969 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerStarted","Data":"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33"} Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.490692 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bca8bbd3-bfa6-4767-8196-e085c6160a7f","Type":"ContainerStarted","Data":"176439ffb6a038c1b1d5f2f81964df916c7e7df5e23ccf1a5b2055fc7ba9530c"} Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.492439 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"879f6704-c02e-420a-90ec-23bfb1ce35fe","Type":"ContainerStarted","Data":"7c0bcad1e6f29561d94767e56d62a30b168b01566f792c04af4b0077b6d72cc6"} Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.495499 4888 generic.go:334] "Generic (PLEG): container finished" podID="15ce5608-5fe1-4873-a3c6-cc8223a60705" 
containerID="e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c" exitCode=0 Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.495630 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" event={"ID":"15ce5608-5fe1-4873-a3c6-cc8223a60705","Type":"ContainerDied","Data":"e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c"} Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.496033 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.541107 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=32.442736946 podStartE2EDuration="39.541084979s" podCreationTimestamp="2025-12-01 19:49:45 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.655702345 +0000 UTC m=+1011.526732259" lastFinishedPulling="2025-12-01 19:50:18.754050388 +0000 UTC m=+1018.625080292" observedRunningTime="2025-12-01 19:50:24.534929945 +0000 UTC m=+1024.405959869" watchObservedRunningTime="2025-12-01 19:50:24.541084979 +0000 UTC m=+1024.412114893" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.544865 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.587479 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=31.044765754 podStartE2EDuration="38.587457188s" podCreationTimestamp="2025-12-01 19:49:46 +0000 UTC" firstStartedPulling="2025-12-01 19:50:11.236592176 +0000 UTC m=+1011.107622090" lastFinishedPulling="2025-12-01 19:50:18.7792836 +0000 UTC m=+1018.650313524" observedRunningTime="2025-12-01 19:50:24.578614808 +0000 UTC m=+1024.449644722" watchObservedRunningTime="2025-12-01 19:50:24.587457188 +0000 UTC m=+1024.458487102" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.686493 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.725468 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.815111 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.861205 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.862667 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.865495 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.872814 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.926660 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-v92p7"] Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.927930 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.937393 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.940280 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-v92p7"] Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.972716 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovs-rundir\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.972755 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c965736-0751-48aa-bf50-db27978e0e91-config\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.972873 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zg8lf\" (UniqueName: \"kubernetes.io/projected/9c965736-0751-48aa-bf50-db27978e0e91-kube-api-access-zg8lf\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.972934 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.972976 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.973000 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-combined-ca-bundle\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.973045 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.973066 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovn-rundir\") pod \"ovn-controller-metrics-v92p7\" 
(UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.973117 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgdxl\" (UniqueName: \"kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:24 crc kubenswrapper[4888]: I1201 19:50:24.973151 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.075097 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovs-rundir\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.075175 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c965736-0751-48aa-bf50-db27978e0e91-config\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.075484 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovs-rundir\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076292 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c965736-0751-48aa-bf50-db27978e0e91-config\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076341 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zg8lf\" (UniqueName: \"kubernetes.io/projected/9c965736-0751-48aa-bf50-db27978e0e91-kube-api-access-zg8lf\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076784 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076847 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " 
pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076872 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-combined-ca-bundle\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076923 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.076952 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovn-rundir\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.077907 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.077532 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/9c965736-0751-48aa-bf50-db27978e0e91-ovn-rundir\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.077990 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgdxl\" (UniqueName: \"kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.078144 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.078222 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.079250 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.081180 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-combined-ca-bundle\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.082668 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c965736-0751-48aa-bf50-db27978e0e91-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.099575 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zg8lf\" (UniqueName: \"kubernetes.io/projected/9c965736-0751-48aa-bf50-db27978e0e91-kube-api-access-zg8lf\") pod \"ovn-controller-metrics-v92p7\" (UID: \"9c965736-0751-48aa-bf50-db27978e0e91\") " pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.109693 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgdxl\" (UniqueName: \"kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl\") pod \"dnsmasq-dns-7f896c8c65-xf7fh\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.153959 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.178305 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.179085 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.182127 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.184863 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.202044 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.255604 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-v92p7" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.281050 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.281305 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.281386 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.281512 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.281591 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nspjn\" (UniqueName: \"kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.382936 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.383022 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.383084 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.383282 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: 
\"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.383325 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nspjn\" (UniqueName: \"kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.386978 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.388844 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.390193 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.391350 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.405344 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nspjn\" (UniqueName: \"kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn\") pod \"dnsmasq-dns-86db49b7ff-r9wkq\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.514912 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.540767 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="dnsmasq-dns" containerID="cri-o://89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103" gracePeriod=10 Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.541485 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="dnsmasq-dns" containerID="cri-o://d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab" gracePeriod=10 Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.541751 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" event={"ID":"15ce5608-5fe1-4873-a3c6-cc8223a60705","Type":"ContainerStarted","Data":"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab"} Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.542975 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.595074 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.596147 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" podStartSLOduration=-9223371995.25865 podStartE2EDuration="41.596125458s" podCreationTimestamp="2025-12-01 19:49:44 +0000 UTC" firstStartedPulling="2025-12-01 19:49:45.10514802 +0000 UTC m=+984.976177944" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:50:25.584391377 +0000 UTC m=+1025.455421291" watchObservedRunningTime="2025-12-01 19:50:25.596125458 +0000 UTC m=+1025.467155372" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.798873 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:25 crc kubenswrapper[4888]: W1201 19:50:25.815563 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b1f88f0_e118_4ea3_861a_e505fc946ea5.slice/crio-768f90e1d4f5c975c1bea8058c983447727af888677a7a8c3800730abcc7e81d WatchSource:0}: Error finding container 768f90e1d4f5c975c1bea8058c983447727af888677a7a8c3800730abcc7e81d: Status 404 returned error can't find the container with id 768f90e1d4f5c975c1bea8058c983447727af888677a7a8c3800730abcc7e81d Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.822307 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.844387 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.847146 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.849420 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-x48tc" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.849609 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.850127 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.850352 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997112 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997201 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997228 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fd24\" (UniqueName: \"kubernetes.io/projected/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-kube-api-access-4fd24\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997418 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-scripts\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997618 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.997899 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:25 crc kubenswrapper[4888]: I1201 19:50:25.998258 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-config\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: 
I1201 19:50:26.099928 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.099994 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.100049 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-config\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.100068 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.100109 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.100132 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fd24\" (UniqueName: \"kubernetes.io/projected/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-kube-api-access-4fd24\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.100159 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-scripts\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.101237 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.101377 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-config\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.101499 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-scripts\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.107063 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.107581 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.109853 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.123547 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fd24\" (UniqueName: \"kubernetes.io/projected/b3cf90fd-3f01-4cf5-bb00-9d5c2e374448-kube-api-access-4fd24\") pod \"ovn-northd-0\" (UID: \"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448\") " pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.213691 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.219490 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.221935 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-v92p7"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.230899 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.242654 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.302967 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config\") pod \"15ce5608-5fe1-4873-a3c6-cc8223a60705\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.303109 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrlhm\" (UniqueName: \"kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm\") pod \"15ce5608-5fe1-4873-a3c6-cc8223a60705\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.303129 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc\") pod \"15ce5608-5fe1-4873-a3c6-cc8223a60705\" (UID: \"15ce5608-5fe1-4873-a3c6-cc8223a60705\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.312142 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm" (OuterVolumeSpecName: "kube-api-access-rrlhm") pod "15ce5608-5fe1-4873-a3c6-cc8223a60705" (UID: "15ce5608-5fe1-4873-a3c6-cc8223a60705"). InnerVolumeSpecName "kube-api-access-rrlhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.356999 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config" (OuterVolumeSpecName: "config") pod "15ce5608-5fe1-4873-a3c6-cc8223a60705" (UID: "15ce5608-5fe1-4873-a3c6-cc8223a60705"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.360941 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "15ce5608-5fe1-4873-a3c6-cc8223a60705" (UID: "15ce5608-5fe1-4873-a3c6-cc8223a60705"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.404759 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc\") pod \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.405299 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config\") pod \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.405348 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xtk2\" (UniqueName: \"kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2\") pod \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\" (UID: \"ab44723f-b49a-495c-b034-5ca0c17b1e3d\") " Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.405782 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrlhm\" (UniqueName: \"kubernetes.io/projected/15ce5608-5fe1-4873-a3c6-cc8223a60705-kube-api-access-rrlhm\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.405814 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.405828 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15ce5608-5fe1-4873-a3c6-cc8223a60705-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.409470 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2" (OuterVolumeSpecName: "kube-api-access-9xtk2") pod "ab44723f-b49a-495c-b034-5ca0c17b1e3d" (UID: "ab44723f-b49a-495c-b034-5ca0c17b1e3d"). InnerVolumeSpecName "kube-api-access-9xtk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.468030 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config" (OuterVolumeSpecName: "config") pod "ab44723f-b49a-495c-b034-5ca0c17b1e3d" (UID: "ab44723f-b49a-495c-b034-5ca0c17b1e3d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.478403 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab44723f-b49a-495c-b034-5ca0c17b1e3d" (UID: "ab44723f-b49a-495c-b034-5ca0c17b1e3d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.511402 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.511444 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab44723f-b49a-495c-b034-5ca0c17b1e3d-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.511455 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xtk2\" (UniqueName: \"kubernetes.io/projected/ab44723f-b49a-495c-b034-5ca0c17b1e3d-kube-api-access-9xtk2\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.557661 4888 generic.go:334] "Generic (PLEG): container finished" podID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerID="e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d" exitCode=0 Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.557723 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" event={"ID":"5aaf257f-64d2-43c9-b470-b4c647c91654","Type":"ContainerDied","Data":"e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.557749 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" event={"ID":"5aaf257f-64d2-43c9-b470-b4c647c91654","Type":"ContainerStarted","Data":"455690b219df65d386fd3558df21435579e0098d5f63240aa0fa0a136d6e4052"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.561322 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-v92p7" event={"ID":"9c965736-0751-48aa-bf50-db27978e0e91","Type":"ContainerStarted","Data":"6e721ded8e84fe5de0f1ec17a9267136d577c2ceb2261ca22dbe6ad037ec3469"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.561365 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-v92p7" event={"ID":"9c965736-0751-48aa-bf50-db27978e0e91","Type":"ContainerStarted","Data":"4e694a4dca1d4c6ea7a966ad5209ca56fca406d2325a9e04cd639e8bc9adfcac"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.563110 4888 generic.go:334] "Generic (PLEG): container finished" podID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerID="6c1c1f95754b11f5399c40deed32ff6e955df31045ac91c6c50afce28c199229" exitCode=0 Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.563174 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" event={"ID":"4b1f88f0-e118-4ea3-861a-e505fc946ea5","Type":"ContainerDied","Data":"6c1c1f95754b11f5399c40deed32ff6e955df31045ac91c6c50afce28c199229"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.563214 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" event={"ID":"4b1f88f0-e118-4ea3-861a-e505fc946ea5","Type":"ContainerStarted","Data":"768f90e1d4f5c975c1bea8058c983447727af888677a7a8c3800730abcc7e81d"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.567132 4888 generic.go:334] "Generic (PLEG): container finished" podID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerID="d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab" exitCode=0 Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 
19:50:26.567245 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" event={"ID":"15ce5608-5fe1-4873-a3c6-cc8223a60705","Type":"ContainerDied","Data":"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.567272 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" event={"ID":"15ce5608-5fe1-4873-a3c6-cc8223a60705","Type":"ContainerDied","Data":"3af0a6bd630ee2cff3e521a0c724281484ccdab7b7497545e22de8a49b589b35"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.567292 4888 scope.go:117] "RemoveContainer" containerID="d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.567500 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-b4qkl" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.576121 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerID="89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103" exitCode=0 Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.577670 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.580308 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" event={"ID":"ab44723f-b49a-495c-b034-5ca0c17b1e3d","Type":"ContainerDied","Data":"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.580370 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-wt4mk" event={"ID":"ab44723f-b49a-495c-b034-5ca0c17b1e3d","Type":"ContainerDied","Data":"f5d630da4fe9380d109a271163619c20b364524888c4e8fad466e5b19e8fae93"} Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.652954 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-v92p7" podStartSLOduration=2.651965969 podStartE2EDuration="2.651965969s" podCreationTimestamp="2025-12-01 19:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:50:26.629015801 +0000 UTC m=+1026.500045765" watchObservedRunningTime="2025-12-01 19:50:26.651965969 +0000 UTC m=+1026.522995883" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.716873 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 19:50:26 crc kubenswrapper[4888]: W1201 19:50:26.718839 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3cf90fd_3f01_4cf5_bb00_9d5c2e374448.slice/crio-63d465e6cb7161a24521132e2c607445cafcbcf2703358a507e86f74cef45ac5 WatchSource:0}: Error finding container 63d465e6cb7161a24521132e2c607445cafcbcf2703358a507e86f74cef45ac5: Status 404 returned error can't find the container with id 63d465e6cb7161a24521132e2c607445cafcbcf2703358a507e86f74cef45ac5 Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.772126 4888 scope.go:117] "RemoveContainer" containerID="e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.805692 4888 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.806268 4888 scope.go:117] "RemoveContainer" containerID="d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab" Dec 01 19:50:26 crc kubenswrapper[4888]: E1201 19:50:26.806756 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab\": container with ID starting with d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab not found: ID does not exist" containerID="d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.806795 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab"} err="failed to get container status \"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab\": rpc error: code = NotFound desc = could not find container \"d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab\": container with ID starting with d948f08fc6dbf78b081425ca47f28328707679a2d5084025d6a9ecaa7eeff1ab not found: ID does not exist" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.806820 4888 scope.go:117] "RemoveContainer" containerID="e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c" Dec 01 19:50:26 crc kubenswrapper[4888]: E1201 19:50:26.807245 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c\": container with ID starting with e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c not found: ID does not exist" containerID="e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.807281 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c"} err="failed to get container status \"e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c\": rpc error: code = NotFound desc = could not find container \"e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c\": container with ID starting with e4412f7313548d5e6899f6273e74b7403ac556bbfcb503b036262d9033b1207c not found: ID does not exist" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.807328 4888 scope.go:117] "RemoveContainer" containerID="89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.842085 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-wt4mk"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.845883 4888 scope.go:117] "RemoveContainer" containerID="6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.855655 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.864392 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-b4qkl"] Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.874852 4888 scope.go:117] "RemoveContainer" 
containerID="89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103" Dec 01 19:50:26 crc kubenswrapper[4888]: E1201 19:50:26.875518 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103\": container with ID starting with 89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103 not found: ID does not exist" containerID="89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.875549 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103"} err="failed to get container status \"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103\": rpc error: code = NotFound desc = could not find container \"89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103\": container with ID starting with 89b868f0e3a6905b9c06e9a038f5db533092570ed375b1e412ac3824a3888103 not found: ID does not exist" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.875576 4888 scope.go:117] "RemoveContainer" containerID="6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16" Dec 01 19:50:26 crc kubenswrapper[4888]: E1201 19:50:26.875893 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16\": container with ID starting with 6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16 not found: ID does not exist" containerID="6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16" Dec 01 19:50:26 crc kubenswrapper[4888]: I1201 19:50:26.875942 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16"} err="failed to get container status \"6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16\": rpc error: code = NotFound desc = could not find container \"6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16\": container with ID starting with 6d2f49bb78c71e710d4de7a6e75093bcc84629a2e8f18b80d8a1d41d17f20a16 not found: ID does not exist" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.371365 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.371413 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.590575 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" event={"ID":"5aaf257f-64d2-43c9-b470-b4c647c91654","Type":"ContainerStarted","Data":"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b"} Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.590750 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.592391 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" event={"ID":"4b1f88f0-e118-4ea3-861a-e505fc946ea5","Type":"ContainerStarted","Data":"3e6210732d715ff37d595313a27167bd24d25866cf7af48c97dfd27770c5ba78"} Dec 01 19:50:27 crc 
kubenswrapper[4888]: I1201 19:50:27.592580 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.593715 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448","Type":"ContainerStarted","Data":"63d465e6cb7161a24521132e2c607445cafcbcf2703358a507e86f74cef45ac5"} Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.612762 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" podStartSLOduration=2.612741988 podStartE2EDuration="2.612741988s" podCreationTimestamp="2025-12-01 19:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:50:27.608382205 +0000 UTC m=+1027.479412119" watchObservedRunningTime="2025-12-01 19:50:27.612741988 +0000 UTC m=+1027.483771902" Dec 01 19:50:27 crc kubenswrapper[4888]: I1201 19:50:27.634424 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" podStartSLOduration=3.634396869 podStartE2EDuration="3.634396869s" podCreationTimestamp="2025-12-01 19:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:50:27.626142286 +0000 UTC m=+1027.497172200" watchObservedRunningTime="2025-12-01 19:50:27.634396869 +0000 UTC m=+1027.505426783" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.462962 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" path="/var/lib/kubelet/pods/15ce5608-5fe1-4873-a3c6-cc8223a60705/volumes" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.463951 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" path="/var/lib/kubelet/pods/ab44723f-b49a-495c-b034-5ca0c17b1e3d/volumes" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.603667 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448","Type":"ContainerStarted","Data":"506e9bf7a2fdba8ac06e3d57d6f98b12567306871a78b9e2c832fc6f2f5b8da2"} Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.603707 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"b3cf90fd-3f01-4cf5-bb00-9d5c2e374448","Type":"ContainerStarted","Data":"c4556c5b14c007cbfb3b5862b94c5d29803e61f1a6ae132cf30884b71eb98d7d"} Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.603761 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.621652 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.426219592 podStartE2EDuration="3.621633524s" podCreationTimestamp="2025-12-01 19:50:25 +0000 UTC" firstStartedPulling="2025-12-01 19:50:26.7246528 +0000 UTC m=+1026.595682714" lastFinishedPulling="2025-12-01 19:50:27.920066732 +0000 UTC m=+1027.791096646" observedRunningTime="2025-12-01 19:50:28.621046298 +0000 UTC m=+1028.492076212" watchObservedRunningTime="2025-12-01 19:50:28.621633524 +0000 UTC m=+1028.492663438" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.974572 4888 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 01 19:50:28 crc kubenswrapper[4888]: I1201 19:50:28.974627 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 01 19:50:29 crc kubenswrapper[4888]: I1201 19:50:29.616044 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 01 19:50:29 crc kubenswrapper[4888]: I1201 19:50:29.707444 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.149757 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.150731 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.328443 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.356441 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.356664 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="dnsmasq-dns" containerID="cri-o://3e6210732d715ff37d595313a27167bd24d25866cf7af48c97dfd27770c5ba78" gracePeriod=10 Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.364082 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.398722 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:50:31 crc kubenswrapper[4888]: E1201 19:50:31.399162 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="init" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.399267 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="init" Dec 01 19:50:31 crc kubenswrapper[4888]: E1201 19:50:31.399308 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="init" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.399316 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="init" Dec 01 19:50:31 crc kubenswrapper[4888]: E1201 19:50:31.399331 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="dnsmasq-dns" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.399338 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="dnsmasq-dns" Dec 01 19:50:31 crc kubenswrapper[4888]: E1201 19:50:31.399365 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="dnsmasq-dns" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.399372 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="dnsmasq-dns" Dec 01 19:50:31 
crc kubenswrapper[4888]: I1201 19:50:31.399561 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="15ce5608-5fe1-4873-a3c6-cc8223a60705" containerName="dnsmasq-dns" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.399581 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab44723f-b49a-495c-b034-5ca0c17b1e3d" containerName="dnsmasq-dns" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.400485 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.407788 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.527659 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-dns-svc\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.527737 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.527764 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.527783 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfm79\" (UniqueName: \"kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.527815 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.630269 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-dns-svc\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.631369 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-dns-svc\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 
19:50:31.635092 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.635137 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.635155 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfm79\" (UniqueName: \"kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.635237 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.635910 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.636120 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.637029 4888 generic.go:334] "Generic (PLEG): container finished" podID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerID="3e6210732d715ff37d595313a27167bd24d25866cf7af48c97dfd27770c5ba78" exitCode=0 Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.637443 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" event={"ID":"4b1f88f0-e118-4ea3-861a-e505fc946ea5","Type":"ContainerDied","Data":"3e6210732d715ff37d595313a27167bd24d25866cf7af48c97dfd27770c5ba78"} Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.638704 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.665011 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfm79\" (UniqueName: \"kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79\") pod \"dnsmasq-dns-698758b865-g8fsv\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") " 
pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.766614 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.877196 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.940424 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config\") pod \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.940518 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc\") pod \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.940555 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgdxl\" (UniqueName: \"kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl\") pod \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.940648 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb\") pod \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\" (UID: \"4b1f88f0-e118-4ea3-861a-e505fc946ea5\") " Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.946889 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl" (OuterVolumeSpecName: "kube-api-access-vgdxl") pod "4b1f88f0-e118-4ea3-861a-e505fc946ea5" (UID: "4b1f88f0-e118-4ea3-861a-e505fc946ea5"). InnerVolumeSpecName "kube-api-access-vgdxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.995763 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b1f88f0-e118-4ea3-861a-e505fc946ea5" (UID: "4b1f88f0-e118-4ea3-861a-e505fc946ea5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:31 crc kubenswrapper[4888]: I1201 19:50:31.995948 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4b1f88f0-e118-4ea3-861a-e505fc946ea5" (UID: "4b1f88f0-e118-4ea3-861a-e505fc946ea5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.005395 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config" (OuterVolumeSpecName: "config") pod "4b1f88f0-e118-4ea3-861a-e505fc946ea5" (UID: "4b1f88f0-e118-4ea3-861a-e505fc946ea5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.042433 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.042471 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.042482 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b1f88f0-e118-4ea3-861a-e505fc946ea5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.042494 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgdxl\" (UniqueName: \"kubernetes.io/projected/4b1f88f0-e118-4ea3-861a-e505fc946ea5-kube-api-access-vgdxl\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.285217 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:50:32 crc kubenswrapper[4888]: W1201 19:50:32.289011 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb08bdea6_ae8a_4625_81ed_709c3cd10106.slice/crio-9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d WatchSource:0}: Error finding container 9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d: Status 404 returned error can't find the container with id 9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.511781 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 19:50:32.512506 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="dnsmasq-dns" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.512602 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="dnsmasq-dns" Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 19:50:32.512671 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="init" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.512914 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="init" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.513165 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" containerName="dnsmasq-dns" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.518155 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.522569 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.522576 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.522660 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.523040 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tbdvz" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.533332 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.584963 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.585041 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk6x7\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-kube-api-access-kk6x7\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.585097 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-lock\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.585120 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.585141 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-cache\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.645990 4888 generic.go:334] "Generic (PLEG): container finished" podID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerID="4c25f404b76e186a32c13d1499de66aa2c6f1d7a100c83209d0f02104263a489" exitCode=0 Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.646068 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-g8fsv" event={"ID":"b08bdea6-ae8a-4625-81ed-709c3cd10106","Type":"ContainerDied","Data":"4c25f404b76e186a32c13d1499de66aa2c6f1d7a100c83209d0f02104263a489"} Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.646438 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-g8fsv" 
event={"ID":"b08bdea6-ae8a-4625-81ed-709c3cd10106","Type":"ContainerStarted","Data":"9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d"} Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.649599 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" event={"ID":"4b1f88f0-e118-4ea3-861a-e505fc946ea5","Type":"ContainerDied","Data":"768f90e1d4f5c975c1bea8058c983447727af888677a7a8c3800730abcc7e81d"} Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.649654 4888 scope.go:117] "RemoveContainer" containerID="3e6210732d715ff37d595313a27167bd24d25866cf7af48c97dfd27770c5ba78" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.649670 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-xf7fh" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.682225 4888 scope.go:117] "RemoveContainer" containerID="6c1c1f95754b11f5399c40deed32ff6e955df31045ac91c6c50afce28c199229" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.687754 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.687958 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk6x7\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-kube-api-access-kk6x7\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.688124 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-lock\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.688208 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.688254 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-cache\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.689009 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-cache\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 19:50:32.689177 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 19:50:32.689213 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 
19:50:32.689267 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:50:33.189245554 +0000 UTC m=+1033.060275468 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.689904 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/33b47f5a-af5b-41b4-9178-a956cd6d2101-lock\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.690301 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.696557 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.704764 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-xf7fh"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.713833 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk6x7\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-kube-api-access-kk6x7\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.731502 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.854111 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-lgqxg"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.855285 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.857975 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.858038 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.858313 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.861301 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-lgqxg"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.904723 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-lgqxg"] Dec 01 19:50:32 crc kubenswrapper[4888]: E1201 19:50:32.905094 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-gbhfs ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-gbhfs ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-lgqxg" podUID="acd422c4-5e82-48ac-8b89-d997e8cd50b2" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.914145 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-s6dcl"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.915167 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.947958 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s6dcl"] Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993105 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993151 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993204 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhxk9\" (UniqueName: \"kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993234 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: 
I1201 19:50:32.993266 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993313 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993339 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbhfs\" (UniqueName: \"kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993373 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993389 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993415 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993438 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993488 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993513 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " 
pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:32 crc kubenswrapper[4888]: I1201 19:50:32.993550 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095484 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095544 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095570 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095625 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095649 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095680 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095704 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095748 4888 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhxk9\" (UniqueName: \"kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095764 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095788 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095821 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095838 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbhfs\" (UniqueName: \"kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.095867 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.097031 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.097209 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.097219 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.097236 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.097281 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.098744 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.099833 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.100891 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.100940 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.101150 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.101405 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.101820 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.111549 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhxk9\" (UniqueName: \"kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9\") pod \"swift-ring-rebalance-s6dcl\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " 
pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.120995 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbhfs\" (UniqueName: \"kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs\") pod \"swift-ring-rebalance-lgqxg\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") " pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.197268 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:33 crc kubenswrapper[4888]: E1201 19:50:33.197519 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 19:50:33 crc kubenswrapper[4888]: E1201 19:50:33.197552 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 19:50:33 crc kubenswrapper[4888]: E1201 19:50:33.197614 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:50:34.197591511 +0000 UTC m=+1034.068621425 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.237378 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.662045 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-g8fsv" event={"ID":"b08bdea6-ae8a-4625-81ed-709c3cd10106","Type":"ContainerStarted","Data":"bf07f4a5bd735f3e794efa252bb5f438c65b24df2816ae05849a365ea9ae7435"} Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.663256 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.664884 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.675475 4888 util.go:30] "No sandbox for pod can be found. 
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.675475 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-lgqxg"
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.684761 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-g8fsv" podStartSLOduration=2.684735351 podStartE2EDuration="2.684735351s" podCreationTimestamp="2025-12-01 19:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:50:33.676769156 +0000 UTC m=+1033.547799100" watchObservedRunningTime="2025-12-01 19:50:33.684735351 +0000 UTC m=+1033.555765265"
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812071 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812325 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812366 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812437 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812483 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812535 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812584 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbhfs\" (UniqueName: \"kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs\") pod \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\" (UID: \"acd422c4-5e82-48ac-8b89-d997e8cd50b2\") "
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.812856 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.813344 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts" (OuterVolumeSpecName: "scripts") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.814751 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.815211 4888 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.815237 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acd422c4-5e82-48ac-8b89-d997e8cd50b2-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.815249 4888 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/acd422c4-5e82-48ac-8b89-d997e8cd50b2-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.820443 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.843845 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.855287 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.870372 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs" (OuterVolumeSpecName: "kube-api-access-gbhfs") pod "acd422c4-5e82-48ac-8b89-d997e8cd50b2" (UID: "acd422c4-5e82-48ac-8b89-d997e8cd50b2"). InnerVolumeSpecName "kube-api-access-gbhfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.917869 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.917901 4888 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.917911 4888 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/acd422c4-5e82-48ac-8b89-d997e8cd50b2-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:33 crc kubenswrapper[4888]: I1201 19:50:33.917921 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbhfs\" (UniqueName: \"kubernetes.io/projected/acd422c4-5e82-48ac-8b89-d997e8cd50b2-kube-api-access-gbhfs\") on node \"crc\" DevicePath \"\""
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.206143 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-s6dcl"]
Dec 01 19:50:34 crc kubenswrapper[4888]: W1201 19:50:34.210135 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2617e3a7_0ff0_4843_9126_a32cee9da7ca.slice/crio-f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc WatchSource:0}: Error finding container f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc: Status 404 returned error can't find the container with id f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.223216 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0"
Dec 01 19:50:34 crc kubenswrapper[4888]: E1201 19:50:34.223449 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 01 19:50:34 crc kubenswrapper[4888]: E1201 19:50:34.223484 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 01 19:50:34 crc kubenswrapper[4888]: E1201 19:50:34.223553 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:50:36.223528949 +0000 UTC m=+1036.094558863 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.437478 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-r72mz"]
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.438725 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-r72mz"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.448289 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-r72mz"]
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.460967 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b1f88f0-e118-4ea3-861a-e505fc946ea5" path="/var/lib/kubelet/pods/4b1f88f0-e118-4ea3-861a-e505fc946ea5/volumes"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.508652 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-8194-account-create-update-djj8n"]
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.509640 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8194-account-create-update-djj8n"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.511791 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.520152 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8194-account-create-update-djj8n"]
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.527736 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78e1c942-cc18-4fb3-a287-137fb7b4f309-operator-scripts\") pod \"glance-db-create-r72mz\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.527904 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6vjk\" (UniqueName: \"kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk\") pod \"glance-db-create-r72mz\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.630114 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b4zl\" (UniqueName: \"kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.630277 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6vjk\" (UniqueName: \"kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk\") pod \"glance-db-create-r72mz\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz"
Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.630329 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n"
\"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.631498 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78e1c942-cc18-4fb3-a287-137fb7b4f309-operator-scripts\") pod \"glance-db-create-r72mz\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.649004 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6vjk\" (UniqueName: \"kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk\") pod \"glance-db-create-r72mz\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " pod="openstack/glance-db-create-r72mz" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.672548 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-lgqxg" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.672553 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6dcl" event={"ID":"2617e3a7-0ff0-4843-9126-a32cee9da7ca","Type":"ContainerStarted","Data":"f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc"} Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.732628 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.732792 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b4zl\" (UniqueName: \"kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.733257 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-lgqxg"] Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.733548 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.740576 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-lgqxg"] Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.754111 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b4zl\" (UniqueName: \"kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl\") pod \"glance-8194-account-create-update-djj8n\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.776138 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-r72mz" Dec 01 19:50:34 crc kubenswrapper[4888]: I1201 19:50:34.834636 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:35 crc kubenswrapper[4888]: I1201 19:50:35.225561 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-r72mz"] Dec 01 19:50:35 crc kubenswrapper[4888]: I1201 19:50:35.518454 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:35 crc kubenswrapper[4888]: I1201 19:50:35.536179 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-8194-account-create-update-djj8n"] Dec 01 19:50:35 crc kubenswrapper[4888]: W1201 19:50:35.548834 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64d69349_ac8d_448b_af24_f47de7982fba.slice/crio-74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668 WatchSource:0}: Error finding container 74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668: Status 404 returned error can't find the container with id 74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668 Dec 01 19:50:35 crc kubenswrapper[4888]: I1201 19:50:35.680828 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-r72mz" event={"ID":"78e1c942-cc18-4fb3-a287-137fb7b4f309","Type":"ContainerStarted","Data":"5b9b33ed52df90f84a0161bf187b584348dd2efd381d414f229132d5be4e2af7"} Dec 01 19:50:35 crc kubenswrapper[4888]: I1201 19:50:35.683570 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8194-account-create-update-djj8n" event={"ID":"64d69349-ac8d-448b-af24-f47de7982fba","Type":"ContainerStarted","Data":"74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668"} Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.263727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:36 crc kubenswrapper[4888]: E1201 19:50:36.263871 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 19:50:36 crc kubenswrapper[4888]: E1201 19:50:36.263897 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 19:50:36 crc kubenswrapper[4888]: E1201 19:50:36.263955 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:50:40.26393496 +0000 UTC m=+1040.134964874 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.460364 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acd422c4-5e82-48ac-8b89-d997e8cd50b2" path="/var/lib/kubelet/pods/acd422c4-5e82-48ac-8b89-d997e8cd50b2/volumes" Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.693685 4888 generic.go:334] "Generic (PLEG): container finished" podID="64d69349-ac8d-448b-af24-f47de7982fba" containerID="78c64fb384f96a6de358875dec7c8ec4dee6d03474f40db69ed960bdeb0f2646" exitCode=0 Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.693768 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8194-account-create-update-djj8n" event={"ID":"64d69349-ac8d-448b-af24-f47de7982fba","Type":"ContainerDied","Data":"78c64fb384f96a6de358875dec7c8ec4dee6d03474f40db69ed960bdeb0f2646"} Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.695586 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-r72mz" event={"ID":"78e1c942-cc18-4fb3-a287-137fb7b4f309","Type":"ContainerDied","Data":"24fd61513984406e5592a0ee025a31418e4b688ea2962e302f55e76bfb5657b1"} Dec 01 19:50:36 crc kubenswrapper[4888]: I1201 19:50:36.695512 4888 generic.go:334] "Generic (PLEG): container finished" podID="78e1c942-cc18-4fb3-a287-137fb7b4f309" containerID="24fd61513984406e5592a0ee025a31418e4b688ea2962e302f55e76bfb5657b1" exitCode=0 Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.236109 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-1d48-account-create-update-plfkf"] Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.238815 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d48-account-create-update-plfkf" Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.241093 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.242253 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-fvx9l"] Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.248854 4888 util.go:30] "No sandbox for pod can be found. 
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.248854 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.255150 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fvx9l"]
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.267077 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1d48-account-create-update-plfkf"]
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.352428 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.352594 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m6sf\" (UniqueName: \"kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.352703 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frfwj\" (UniqueName: \"kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.352730 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.454576 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.454679 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m6sf\" (UniqueName: \"kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.454752 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frfwj\" (UniqueName: \"kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.454783 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.455687 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.456414 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.478804 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m6sf\" (UniqueName: \"kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf\") pod \"keystone-db-create-fvx9l\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.480977 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frfwj\" (UniqueName: \"kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj\") pod \"keystone-1d48-account-create-update-plfkf\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.565391 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d48-account-create-update-plfkf"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.573555 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fvx9l"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.945084 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-4fdb-account-create-update-7btll"]
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.946410 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.952154 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.953596 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-84cpp"]
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.987773 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4fdb-account-create-update-7btll"]
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.987897 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-84cpp"
Dec 01 19:50:38 crc kubenswrapper[4888]: I1201 19:50:38.997260 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-84cpp"]
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.072228 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzsgs\" (UniqueName: \"kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs\") pod \"placement-4fdb-account-create-update-7btll\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.072926 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts\") pod \"placement-4fdb-account-create-update-7btll\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.174447 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts\") pod \"placement-4fdb-account-create-update-7btll\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.174524 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzsgs\" (UniqueName: \"kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs\") pod \"placement-4fdb-account-create-update-7btll\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.174561 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ssrs\" (UniqueName: \"kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.174640 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.175389 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts\") pod \"placement-4fdb-account-create-update-7btll\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll"
Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.196702 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzsgs\" (UniqueName: \"kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs\") pod \"placement-4fdb-account-create-update-7btll\" (UID:
\"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " pod="openstack/placement-4fdb-account-create-update-7btll" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.275975 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ssrs\" (UniqueName: \"kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.276053 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.277059 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.294273 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ssrs\" (UniqueName: \"kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs\") pod \"placement-db-create-84cpp\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " pod="openstack/placement-db-create-84cpp" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.322840 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4fdb-account-create-update-7btll" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.333459 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-84cpp" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.736000 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-8194-account-create-update-djj8n" event={"ID":"64d69349-ac8d-448b-af24-f47de7982fba","Type":"ContainerDied","Data":"74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668"} Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.740302 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74e25cbc7288a5b714883b34336d9b37ea724acdeb94256dfa42534d97549668" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.740328 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-r72mz" event={"ID":"78e1c942-cc18-4fb3-a287-137fb7b4f309","Type":"ContainerDied","Data":"5b9b33ed52df90f84a0161bf187b584348dd2efd381d414f229132d5be4e2af7"} Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.740345 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b9b33ed52df90f84a0161bf187b584348dd2efd381d414f229132d5be4e2af7" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.736446 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-r72mz" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.742574 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.886975 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6vjk\" (UniqueName: \"kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk\") pod \"78e1c942-cc18-4fb3-a287-137fb7b4f309\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.887392 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts\") pod \"64d69349-ac8d-448b-af24-f47de7982fba\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.887488 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78e1c942-cc18-4fb3-a287-137fb7b4f309-operator-scripts\") pod \"78e1c942-cc18-4fb3-a287-137fb7b4f309\" (UID: \"78e1c942-cc18-4fb3-a287-137fb7b4f309\") " Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.887619 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b4zl\" (UniqueName: \"kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl\") pod \"64d69349-ac8d-448b-af24-f47de7982fba\" (UID: \"64d69349-ac8d-448b-af24-f47de7982fba\") " Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.890862 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "64d69349-ac8d-448b-af24-f47de7982fba" (UID: "64d69349-ac8d-448b-af24-f47de7982fba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.891422 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78e1c942-cc18-4fb3-a287-137fb7b4f309-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "78e1c942-cc18-4fb3-a287-137fb7b4f309" (UID: "78e1c942-cc18-4fb3-a287-137fb7b4f309"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.899433 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk" (OuterVolumeSpecName: "kube-api-access-c6vjk") pod "78e1c942-cc18-4fb3-a287-137fb7b4f309" (UID: "78e1c942-cc18-4fb3-a287-137fb7b4f309"). InnerVolumeSpecName "kube-api-access-c6vjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.903099 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl" (OuterVolumeSpecName: "kube-api-access-2b4zl") pod "64d69349-ac8d-448b-af24-f47de7982fba" (UID: "64d69349-ac8d-448b-af24-f47de7982fba"). InnerVolumeSpecName "kube-api-access-2b4zl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.989473 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6vjk\" (UniqueName: \"kubernetes.io/projected/78e1c942-cc18-4fb3-a287-137fb7b4f309-kube-api-access-c6vjk\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.989526 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/64d69349-ac8d-448b-af24-f47de7982fba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.989536 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78e1c942-cc18-4fb3-a287-137fb7b4f309-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:39 crc kubenswrapper[4888]: I1201 19:50:39.989545 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b4zl\" (UniqueName: \"kubernetes.io/projected/64d69349-ac8d-448b-af24-f47de7982fba-kube-api-access-2b4zl\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.205882 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-1d48-account-create-update-plfkf"] Dec 01 19:50:40 crc kubenswrapper[4888]: W1201 19:50:40.209327 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84a8c1e9_3de0_4547_bbcb_5d4776b8ad2f.slice/crio-ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705 WatchSource:0}: Error finding container ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705: Status 404 returned error can't find the container with id ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705 Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.264039 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-84cpp"] Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.299195 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:40 crc kubenswrapper[4888]: E1201 19:50:40.299441 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 19:50:40 crc kubenswrapper[4888]: E1201 19:50:40.299472 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 19:50:40 crc kubenswrapper[4888]: E1201 19:50:40.299549 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:50:48.299529764 +0000 UTC m=+1048.170559678 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.329938 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4fdb-account-create-update-7btll"] Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.340721 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-fvx9l"] Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.752605 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4fdb-account-create-update-7btll" event={"ID":"5235c1b8-6bf6-485b-add3-05ef29c9178d","Type":"ContainerStarted","Data":"564d1d261114a78453be729def18ba57018554ea93c26cc4d8d2855edee35e8a"} Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.754460 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6dcl" event={"ID":"2617e3a7-0ff0-4843-9126-a32cee9da7ca","Type":"ContainerStarted","Data":"9606eb79c4e3779f7ecd30450d2f94687e675c90fc8bef231bf1d3ad082cb52f"} Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.756517 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fvx9l" event={"ID":"8167f98f-356f-4fae-8945-96e7c0ab8c47","Type":"ContainerStarted","Data":"e76bf250522f88314059d35ad4af7abdb92aff5b840aaea01bedfaf1ee05c7c7"} Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.762913 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-84cpp" event={"ID":"e9267a3d-f044-4529-b9f7-d7de6088819e","Type":"ContainerStarted","Data":"54ddaf7eb7471c4165733634bb8364b0bd3c625d314b53aebf1ed7aa0a261981"} Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.764128 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-8194-account-create-update-djj8n" Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.764934 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d48-account-create-update-plfkf" event={"ID":"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f","Type":"ContainerStarted","Data":"ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705"} Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.764991 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-r72mz" Dec 01 19:50:40 crc kubenswrapper[4888]: I1201 19:50:40.783461 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-s6dcl" podStartSLOduration=3.368967686 podStartE2EDuration="8.783442587s" podCreationTimestamp="2025-12-01 19:50:32 +0000 UTC" firstStartedPulling="2025-12-01 19:50:34.212532018 +0000 UTC m=+1034.083561932" lastFinishedPulling="2025-12-01 19:50:39.627006919 +0000 UTC m=+1039.498036833" observedRunningTime="2025-12-01 19:50:40.770573946 +0000 UTC m=+1040.641603870" watchObservedRunningTime="2025-12-01 19:50:40.783442587 +0000 UTC m=+1040.654472501" Dec 01 19:50:41 crc kubenswrapper[4888]: I1201 19:50:41.280297 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 01 19:50:41 crc kubenswrapper[4888]: I1201 19:50:41.767402 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:50:41 crc kubenswrapper[4888]: I1201 19:50:41.827637 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:41 crc kubenswrapper[4888]: I1201 19:50:41.827883 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="dnsmasq-dns" containerID="cri-o://393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b" gracePeriod=10 Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.780163 4888 generic.go:334] "Generic (PLEG): container finished" podID="8167f98f-356f-4fae-8945-96e7c0ab8c47" containerID="dd64ea97811dc1e5b5fe3ad5721f00554f2380762733c3122d9094ea5d3b8714" exitCode=0 Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.780220 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fvx9l" event={"ID":"8167f98f-356f-4fae-8945-96e7c0ab8c47","Type":"ContainerDied","Data":"dd64ea97811dc1e5b5fe3ad5721f00554f2380762733c3122d9094ea5d3b8714"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.781883 4888 generic.go:334] "Generic (PLEG): container finished" podID="e9267a3d-f044-4529-b9f7-d7de6088819e" containerID="a51f5095682b3c74f409fcc6a74e6849a8106fa25fbc5f67b3f302466c205227" exitCode=0 Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.781999 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-84cpp" event={"ID":"e9267a3d-f044-4529-b9f7-d7de6088819e","Type":"ContainerDied","Data":"a51f5095682b3c74f409fcc6a74e6849a8106fa25fbc5f67b3f302466c205227"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.783270 4888 generic.go:334] "Generic (PLEG): container finished" podID="84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" containerID="4247c72604821509a70c03a465aafd101626086f032db0b47585d5b81fb6ba06" exitCode=0 Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.783306 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d48-account-create-update-plfkf" event={"ID":"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f","Type":"ContainerDied","Data":"4247c72604821509a70c03a465aafd101626086f032db0b47585d5b81fb6ba06"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.784564 4888 generic.go:334] "Generic (PLEG): container finished" podID="5235c1b8-6bf6-485b-add3-05ef29c9178d" containerID="35034bb595f0a43e09c5e0a2b582b0a9c18910ad541a890e07892b3ca8002a4f" exitCode=0 Dec 01 19:50:42 crc 
kubenswrapper[4888]: I1201 19:50:42.784602 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4fdb-account-create-update-7btll" event={"ID":"5235c1b8-6bf6-485b-add3-05ef29c9178d","Type":"ContainerDied","Data":"35034bb595f0a43e09c5e0a2b582b0a9c18910ad541a890e07892b3ca8002a4f"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.785275 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.785973 4888 generic.go:334] "Generic (PLEG): container finished" podID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerID="393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b" exitCode=0 Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.786001 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" event={"ID":"5aaf257f-64d2-43c9-b470-b4c647c91654","Type":"ContainerDied","Data":"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.786017 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" event={"ID":"5aaf257f-64d2-43c9-b470-b4c647c91654","Type":"ContainerDied","Data":"455690b219df65d386fd3558df21435579e0098d5f63240aa0fa0a136d6e4052"} Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.786032 4888 scope.go:117] "RemoveContainer" containerID="393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.788640 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb\") pod \"5aaf257f-64d2-43c9-b470-b4c647c91654\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.788804 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config\") pod \"5aaf257f-64d2-43c9-b470-b4c647c91654\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.788833 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc\") pod \"5aaf257f-64d2-43c9-b470-b4c647c91654\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.788857 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nspjn\" (UniqueName: \"kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn\") pod \"5aaf257f-64d2-43c9-b470-b4c647c91654\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.788885 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb\") pod \"5aaf257f-64d2-43c9-b470-b4c647c91654\" (UID: \"5aaf257f-64d2-43c9-b470-b4c647c91654\") " Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.795691 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn" 
(OuterVolumeSpecName: "kube-api-access-nspjn") pod "5aaf257f-64d2-43c9-b470-b4c647c91654" (UID: "5aaf257f-64d2-43c9-b470-b4c647c91654"). InnerVolumeSpecName "kube-api-access-nspjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.851092 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config" (OuterVolumeSpecName: "config") pod "5aaf257f-64d2-43c9-b470-b4c647c91654" (UID: "5aaf257f-64d2-43c9-b470-b4c647c91654"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.874151 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5aaf257f-64d2-43c9-b470-b4c647c91654" (UID: "5aaf257f-64d2-43c9-b470-b4c647c91654"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.874809 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5aaf257f-64d2-43c9-b470-b4c647c91654" (UID: "5aaf257f-64d2-43c9-b470-b4c647c91654"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.887960 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5aaf257f-64d2-43c9-b470-b4c647c91654" (UID: "5aaf257f-64d2-43c9-b470-b4c647c91654"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.891615 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nspjn\" (UniqueName: \"kubernetes.io/projected/5aaf257f-64d2-43c9-b470-b4c647c91654-kube-api-access-nspjn\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.891650 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.891665 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.891679 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.891688 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5aaf257f-64d2-43c9-b470-b4c647c91654-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.896990 4888 scope.go:117] "RemoveContainer" containerID="e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.918647 4888 scope.go:117] "RemoveContainer" containerID="393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b" Dec 01 19:50:42 crc kubenswrapper[4888]: E1201 19:50:42.919082 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b\": container with ID starting with 393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b not found: ID does not exist" containerID="393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.919129 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b"} err="failed to get container status \"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b\": rpc error: code = NotFound desc = could not find container \"393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b\": container with ID starting with 393ae32e31175513200216f494b8a453df90fb76ca90c38215c96035a32a064b not found: ID does not exist" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.919166 4888 scope.go:117] "RemoveContainer" containerID="e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d" Dec 01 19:50:42 crc kubenswrapper[4888]: E1201 19:50:42.919792 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d\": container with ID starting with e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d not found: ID does not exist" containerID="e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d" Dec 01 19:50:42 crc kubenswrapper[4888]: I1201 19:50:42.919818 4888 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d"} err="failed to get container status \"e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d\": rpc error: code = NotFound desc = could not find container \"e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d\": container with ID starting with e76486ceee24954820635e2028227984ac516f7df656a72f632991cf08d6d74d not found: ID does not exist" Dec 01 19:50:43 crc kubenswrapper[4888]: I1201 19:50:43.795784 4888 generic.go:334] "Generic (PLEG): container finished" podID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerID="c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d" exitCode=0 Dec 01 19:50:43 crc kubenswrapper[4888]: I1201 19:50:43.795839 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerDied","Data":"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"} Dec 01 19:50:43 crc kubenswrapper[4888]: I1201 19:50:43.797081 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-r9wkq" Dec 01 19:50:43 crc kubenswrapper[4888]: I1201 19:50:43.975701 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:43 crc kubenswrapper[4888]: I1201 19:50:43.980387 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-r9wkq"] Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.076148 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5aaf257f_64d2_43c9_b470_b4c647c91654.slice\": RecentStats: unable to find data in memory cache]" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.462531 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" path="/var/lib/kubelet/pods/5aaf257f-64d2-43c9-b470-b4c647c91654/volumes" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.583733 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-1d48-account-create-update-plfkf" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709042 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-vcl88"] Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.709386 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e1c942-cc18-4fb3-a287-137fb7b4f309" containerName="mariadb-database-create" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709402 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e1c942-cc18-4fb3-a287-137fb7b4f309" containerName="mariadb-database-create" Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.709419 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="dnsmasq-dns" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709426 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="dnsmasq-dns" Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.709442 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d69349-ac8d-448b-af24-f47de7982fba" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709448 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d69349-ac8d-448b-af24-f47de7982fba" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.709465 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="init" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709490 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="init" Dec 01 19:50:44 crc kubenswrapper[4888]: E1201 19:50:44.709513 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709519 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709697 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5aaf257f-64d2-43c9-b470-b4c647c91654" containerName="dnsmasq-dns" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709910 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e1c942-cc18-4fb3-a287-137fb7b4f309" containerName="mariadb-database-create" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.709942 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.710037 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d69349-ac8d-448b-af24-f47de7982fba" containerName="mariadb-account-create-update" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.710997 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.712733 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jllkx" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.712916 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.718472 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vcl88"] Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.734753 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts\") pod \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.734857 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frfwj\" (UniqueName: \"kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj\") pod \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\" (UID: \"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.735904 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" (UID: "84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.740398 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj" (OuterVolumeSpecName: "kube-api-access-frfwj") pod "84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" (UID: "84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f"). InnerVolumeSpecName "kube-api-access-frfwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.760146 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4fdb-account-create-update-7btll" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.766877 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-84cpp" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.770133 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fvx9l" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.812325 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-84cpp" event={"ID":"e9267a3d-f044-4529-b9f7-d7de6088819e","Type":"ContainerDied","Data":"54ddaf7eb7471c4165733634bb8364b0bd3c625d314b53aebf1ed7aa0a261981"} Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.812372 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54ddaf7eb7471c4165733634bb8364b0bd3c625d314b53aebf1ed7aa0a261981" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.812435 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-84cpp" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.814057 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-1d48-account-create-update-plfkf" event={"ID":"84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f","Type":"ContainerDied","Data":"ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705"} Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.814083 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac48137e649da346aa66fb497904bd8458677ca0f229594014588fd2f5c60705" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.814131 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-1d48-account-create-update-plfkf" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.820101 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4fdb-account-create-update-7btll" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.820105 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4fdb-account-create-update-7btll" event={"ID":"5235c1b8-6bf6-485b-add3-05ef29c9178d","Type":"ContainerDied","Data":"564d1d261114a78453be729def18ba57018554ea93c26cc4d8d2855edee35e8a"} Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.820232 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="564d1d261114a78453be729def18ba57018554ea93c26cc4d8d2855edee35e8a" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.823138 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-fvx9l" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.823137 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-fvx9l" event={"ID":"8167f98f-356f-4fae-8945-96e7c0ab8c47","Type":"ContainerDied","Data":"e76bf250522f88314059d35ad4af7abdb92aff5b840aaea01bedfaf1ee05c7c7"} Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.823305 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e76bf250522f88314059d35ad4af7abdb92aff5b840aaea01bedfaf1ee05c7c7" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.825412 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerStarted","Data":"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"} Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.826370 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837051 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmb6c\" (UniqueName: \"kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837136 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 
19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837160 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837221 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837331 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.837345 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frfwj\" (UniqueName: \"kubernetes.io/projected/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f-kube-api-access-frfwj\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938418 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts\") pod \"e9267a3d-f044-4529-b9f7-d7de6088819e\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938495 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzsgs\" (UniqueName: \"kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs\") pod \"5235c1b8-6bf6-485b-add3-05ef29c9178d\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938518 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m6sf\" (UniqueName: \"kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf\") pod \"8167f98f-356f-4fae-8945-96e7c0ab8c47\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938538 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts\") pod \"5235c1b8-6bf6-485b-add3-05ef29c9178d\" (UID: \"5235c1b8-6bf6-485b-add3-05ef29c9178d\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938586 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ssrs\" (UniqueName: \"kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs\") pod \"e9267a3d-f044-4529-b9f7-d7de6088819e\" (UID: \"e9267a3d-f044-4529-b9f7-d7de6088819e\") " Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938634 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts\") pod \"8167f98f-356f-4fae-8945-96e7c0ab8c47\" (UID: \"8167f98f-356f-4fae-8945-96e7c0ab8c47\") " Dec 01 19:50:44 crc 
kubenswrapper[4888]: I1201 19:50:44.938894 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938926 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.938970 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.939053 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmb6c\" (UniqueName: \"kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.939682 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e9267a3d-f044-4529-b9f7-d7de6088819e" (UID: "e9267a3d-f044-4529-b9f7-d7de6088819e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.940575 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8167f98f-356f-4fae-8945-96e7c0ab8c47" (UID: "8167f98f-356f-4fae-8945-96e7c0ab8c47"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.941662 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5235c1b8-6bf6-485b-add3-05ef29c9178d" (UID: "5235c1b8-6bf6-485b-add3-05ef29c9178d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944053 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944059 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944113 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs" (OuterVolumeSpecName: "kube-api-access-8ssrs") pod "e9267a3d-f044-4529-b9f7-d7de6088819e" (UID: "e9267a3d-f044-4529-b9f7-d7de6088819e"). InnerVolumeSpecName "kube-api-access-8ssrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944397 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf" (OuterVolumeSpecName: "kube-api-access-2m6sf") pod "8167f98f-356f-4fae-8945-96e7c0ab8c47" (UID: "8167f98f-356f-4fae-8945-96e7c0ab8c47"). InnerVolumeSpecName "kube-api-access-2m6sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944590 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.944830 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs" (OuterVolumeSpecName: "kube-api-access-pzsgs") pod "5235c1b8-6bf6-485b-add3-05ef29c9178d" (UID: "5235c1b8-6bf6-485b-add3-05ef29c9178d"). InnerVolumeSpecName "kube-api-access-pzsgs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:44 crc kubenswrapper[4888]: I1201 19:50:44.959602 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmb6c\" (UniqueName: \"kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c\") pod \"glance-db-sync-vcl88\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.060771 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9267a3d-f044-4529-b9f7-d7de6088819e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.061123 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzsgs\" (UniqueName: \"kubernetes.io/projected/5235c1b8-6bf6-485b-add3-05ef29c9178d-kube-api-access-pzsgs\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.061203 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m6sf\" (UniqueName: \"kubernetes.io/projected/8167f98f-356f-4fae-8945-96e7c0ab8c47-kube-api-access-2m6sf\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.061260 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5235c1b8-6bf6-485b-add3-05ef29c9178d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.061330 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ssrs\" (UniqueName: \"kubernetes.io/projected/e9267a3d-f044-4529-b9f7-d7de6088819e-kube-api-access-8ssrs\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.061384 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8167f98f-356f-4fae-8945-96e7c0ab8c47-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.091094 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vcl88" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.630529 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.514788879 podStartE2EDuration="1m1.630505066s" podCreationTimestamp="2025-12-01 19:49:44 +0000 UTC" firstStartedPulling="2025-12-01 19:49:46.797853246 +0000 UTC m=+986.668883160" lastFinishedPulling="2025-12-01 19:50:09.913569433 +0000 UTC m=+1009.784599347" observedRunningTime="2025-12-01 19:50:44.849344244 +0000 UTC m=+1044.720374178" watchObservedRunningTime="2025-12-01 19:50:45.630505066 +0000 UTC m=+1045.501534990" Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.641799 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-vcl88"] Dec 01 19:50:45 crc kubenswrapper[4888]: W1201 19:50:45.645035 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb85ed1be_1f93_4247_ae07_5c08ecbb6802.slice/crio-1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535 WatchSource:0}: Error finding container 1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535: Status 404 returned error can't find the container with id 1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535 Dec 01 19:50:45 crc kubenswrapper[4888]: I1201 19:50:45.847545 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vcl88" event={"ID":"b85ed1be-1f93-4247-ae07-5c08ecbb6802","Type":"ContainerStarted","Data":"1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535"} Dec 01 19:50:48 crc kubenswrapper[4888]: I1201 19:50:48.357019 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:50:48 crc kubenswrapper[4888]: E1201 19:50:48.357241 4888 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 19:50:48 crc kubenswrapper[4888]: E1201 19:50:48.357273 4888 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 19:50:48 crc kubenswrapper[4888]: E1201 19:50:48.357340 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift podName:33b47f5a-af5b-41b4-9178-a956cd6d2101 nodeName:}" failed. No retries permitted until 2025-12-01 19:51:04.357322335 +0000 UTC m=+1064.228352239 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift") pod "swift-storage-0" (UID: "33b47f5a-af5b-41b4-9178-a956cd6d2101") : configmap "swift-ring-files" not found Dec 01 19:50:48 crc kubenswrapper[4888]: I1201 19:50:48.892834 4888 generic.go:334] "Generic (PLEG): container finished" podID="2617e3a7-0ff0-4843-9126-a32cee9da7ca" containerID="9606eb79c4e3779f7ecd30450d2f94687e675c90fc8bef231bf1d3ad082cb52f" exitCode=0 Dec 01 19:50:48 crc kubenswrapper[4888]: I1201 19:50:48.892938 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6dcl" event={"ID":"2617e3a7-0ff0-4843-9126-a32cee9da7ca","Type":"ContainerDied","Data":"9606eb79c4e3779f7ecd30450d2f94687e675c90fc8bef231bf1d3ad082cb52f"} Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.671614 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.678379 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.678569 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.678655 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.678759 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.678927 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.679043 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhxk9\" (UniqueName: \"kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.679119 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf\") pod \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\" (UID: \"2617e3a7-0ff0-4843-9126-a32cee9da7ca\") " Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.681254 4888 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.682275 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.707554 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9" (OuterVolumeSpecName: "kube-api-access-hhxk9") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "kube-api-access-hhxk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.708049 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.726100 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.726727 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts" (OuterVolumeSpecName: "scripts") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.756372 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2617e3a7-0ff0-4843-9126-a32cee9da7ca" (UID: "2617e3a7-0ff0-4843-9126-a32cee9da7ca"). InnerVolumeSpecName "swiftconf". 
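
The nestedpendingoperations failure quoted earlier shows how the kubelet paces a failing volume mount: each MountVolume.SetUp failure re-queues the operation with an exponentially growing durationBeforeRetry (already 16s here, implying several earlier failures while the swift-ring-files ConfigMap was missing), and the retry stamped 19:51:04 further below succeeds, presumably because the swift-ring-rebalance job (which exited 0 at 19:50:48) had published the ring files by then. A sketch of that capped-doubling pattern; the 500ms floor and roughly two-minute ceiling are my assumptions about kubelet's backoff constants, not something this log states:

    package main

    import (
        "fmt"
        "time"
    )

    // nextRetryDelay doubles the previous delay up to a fixed ceiling,
    // the shape of backoff visible in the durationBeforeRetry values.
    // The floor/ceiling constants are assumptions for illustration.
    func nextRetryDelay(prev time.Duration) time.Duration {
        const (
            floor   = 500 * time.Millisecond
            ceiling = 2*time.Minute + 2*time.Second
        )
        if prev < floor {
            return floor
        }
        if next := 2 * prev; next < ceiling {
            return next
        }
        return ceiling
    }

    func main() {
        var d time.Duration
        for i := 1; i <= 8; i++ {
            d = nextRetryDelay(d)
            fmt.Printf("failure %d -> retry in %v\n", i, d) // 16s appears at failure 6
        }
    }
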
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783221 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhxk9\" (UniqueName: \"kubernetes.io/projected/2617e3a7-0ff0-4843-9126-a32cee9da7ca-kube-api-access-hhxk9\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783261 4888 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783275 4888 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783286 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2617e3a7-0ff0-4843-9126-a32cee9da7ca-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783299 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783310 4888 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2617e3a7-0ff0-4843-9126-a32cee9da7ca-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:50 crc kubenswrapper[4888]: I1201 19:50:50.783320 4888 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2617e3a7-0ff0-4843-9126-a32cee9da7ca-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 01 19:50:51 crc kubenswrapper[4888]: I1201 19:50:51.048718 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-s6dcl" event={"ID":"2617e3a7-0ff0-4843-9126-a32cee9da7ca","Type":"ContainerDied","Data":"f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc"} Dec 01 19:50:51 crc kubenswrapper[4888]: I1201 19:50:51.049133 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8f568b5eee91f8d74b460e7a93226f6661c90152ea879ac3936fb0d4bd0a6dc" Dec 01 19:50:51 crc kubenswrapper[4888]: I1201 19:50:51.049242 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-s6dcl" Dec 01 19:50:53 crc kubenswrapper[4888]: I1201 19:50:53.534086 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-b4v8q" podUID="644ca96f-aee4-40b9-957b-b18e28634a66" containerName="ovn-controller" probeResult="failure" output=< Dec 01 19:50:53 crc kubenswrapper[4888]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 19:50:53 crc kubenswrapper[4888]: > Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.092157 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.099254 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-9rcwl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.691145 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-b4v8q-config-7s4bl"] Dec 01 19:50:54 crc kubenswrapper[4888]: E1201 19:50:54.692594 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8167f98f-356f-4fae-8945-96e7c0ab8c47" containerName="mariadb-database-create" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.692670 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8167f98f-356f-4fae-8945-96e7c0ab8c47" containerName="mariadb-database-create" Dec 01 19:50:54 crc kubenswrapper[4888]: E1201 19:50:54.692744 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9267a3d-f044-4529-b9f7-d7de6088819e" containerName="mariadb-database-create" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.692802 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9267a3d-f044-4529-b9f7-d7de6088819e" containerName="mariadb-database-create" Dec 01 19:50:54 crc kubenswrapper[4888]: E1201 19:50:54.692878 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5235c1b8-6bf6-485b-add3-05ef29c9178d" containerName="mariadb-account-create-update" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.692954 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5235c1b8-6bf6-485b-add3-05ef29c9178d" containerName="mariadb-account-create-update" Dec 01 19:50:54 crc kubenswrapper[4888]: E1201 19:50:54.693060 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2617e3a7-0ff0-4843-9126-a32cee9da7ca" containerName="swift-ring-rebalance" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.693133 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2617e3a7-0ff0-4843-9126-a32cee9da7ca" containerName="swift-ring-rebalance" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.693465 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5235c1b8-6bf6-485b-add3-05ef29c9178d" containerName="mariadb-account-create-update" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.693562 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2617e3a7-0ff0-4843-9126-a32cee9da7ca" containerName="swift-ring-rebalance" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.693635 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="8167f98f-356f-4fae-8945-96e7c0ab8c47" containerName="mariadb-database-create" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.693716 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9267a3d-f044-4529-b9f7-d7de6088819e" containerName="mariadb-database-create" Dec 01 19:50:54 crc 
kubenswrapper[4888]: I1201 19:50:54.694551 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.700582 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.709170 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b4v8q-config-7s4bl"] Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.889512 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.889830 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.889850 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5c9x\" (UniqueName: \"kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.889882 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.889907 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.890234 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.996138 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.992373 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.996489 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.996523 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5c9x\" (UniqueName: \"kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.996636 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.996729 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.997138 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.997720 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.998617 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.998709 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:54 crc kubenswrapper[4888]: I1201 19:50:54.998754 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:55 crc kubenswrapper[4888]: I1201 19:50:55.025000 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5c9x\" (UniqueName: \"kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x\") pod \"ovn-controller-b4v8q-config-7s4bl\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:55 crc kubenswrapper[4888]: I1201 19:50:55.054877 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:50:55 crc kubenswrapper[4888]: I1201 19:50:55.638777 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:50:56 crc kubenswrapper[4888]: I1201 19:50:56.097947 4888 generic.go:334] "Generic (PLEG): container finished" podID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerID="db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33" exitCode=0 Dec 01 19:50:56 crc kubenswrapper[4888]: I1201 19:50:56.098053 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerDied","Data":"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33"} Dec 01 19:50:58 crc kubenswrapper[4888]: I1201 19:50:58.585773 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-b4v8q" podUID="644ca96f-aee4-40b9-957b-b18e28634a66" containerName="ovn-controller" probeResult="failure" output=< Dec 01 19:50:58 crc kubenswrapper[4888]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 19:50:58 crc kubenswrapper[4888]: > Dec 01 19:51:03 crc kubenswrapper[4888]: I1201 19:51:03.565071 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-b4v8q" podUID="644ca96f-aee4-40b9-957b-b18e28634a66" containerName="ovn-controller" probeResult="failure" output=< Dec 01 19:51:03 crc kubenswrapper[4888]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 19:51:03 crc kubenswrapper[4888]: > Dec 01 19:51:04 crc kubenswrapper[4888]: I1201 19:51:04.376992 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:51:04 crc kubenswrapper[4888]: I1201 19:51:04.394102 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/33b47f5a-af5b-41b4-9178-a956cd6d2101-etc-swift\") pod \"swift-storage-0\" (UID: \"33b47f5a-af5b-41b4-9178-a956cd6d2101\") " pod="openstack/swift-storage-0" Dec 01 19:51:04 crc kubenswrapper[4888]: I1201 19:51:04.404882 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 01 19:51:05 crc kubenswrapper[4888]: E1201 19:51:05.204761 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 01 19:51:05 crc kubenswrapper[4888]: E1201 19:51:05.205378 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cmb6c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-vcl88_openstack(b85ed1be-1f93-4247-ae07-5c08ecbb6802): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:51:05 crc kubenswrapper[4888]: E1201 19:51:05.206475 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-vcl88" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" Dec 01 19:51:05 crc kubenswrapper[4888]: I1201 19:51:05.731770 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-b4v8q-config-7s4bl"] Dec 01 19:51:05 crc kubenswrapper[4888]: W1201 19:51:05.747020 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb4dff5c_b796_4b09_acdd_2472c4d22e76.slice/crio-19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c WatchSource:0}: Error finding container 
19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c: Status 404 returned error can't find the container with id 19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c Dec 01 19:51:05 crc kubenswrapper[4888]: I1201 19:51:05.814297 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 19:51:05 crc kubenswrapper[4888]: W1201 19:51:05.818420 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33b47f5a_af5b_41b4_9178_a956cd6d2101.slice/crio-d80f9a150e0e447c3b9532d8dd3143fdc6eea0347d697890d2e2ba53035323ac WatchSource:0}: Error finding container d80f9a150e0e447c3b9532d8dd3143fdc6eea0347d697890d2e2ba53035323ac: Status 404 returned error can't find the container with id d80f9a150e0e447c3b9532d8dd3143fdc6eea0347d697890d2e2ba53035323ac Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.242215 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q-config-7s4bl" event={"ID":"fb4dff5c-b796-4b09-acdd-2472c4d22e76","Type":"ContainerStarted","Data":"1aa75780a4b1c0ba06164d7973e7b01ac1e1f5db67213e7f2ebc07e2732ae2d7"} Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.242680 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q-config-7s4bl" event={"ID":"fb4dff5c-b796-4b09-acdd-2472c4d22e76","Type":"ContainerStarted","Data":"19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c"} Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.249715 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerStarted","Data":"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd"} Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.250111 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.253749 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"d80f9a150e0e447c3b9532d8dd3143fdc6eea0347d697890d2e2ba53035323ac"} Dec 01 19:51:06 crc kubenswrapper[4888]: E1201 19:51:06.255216 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-vcl88" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.267521 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-b4v8q-config-7s4bl" podStartSLOduration=12.267503975 podStartE2EDuration="12.267503975s" podCreationTimestamp="2025-12-01 19:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:06.261119801 +0000 UTC m=+1066.132149715" watchObservedRunningTime="2025-12-01 19:51:06.267503975 +0000 UTC m=+1066.138533879" Dec 01 19:51:06 crc kubenswrapper[4888]: I1201 19:51:06.305376 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371953.549425 podStartE2EDuration="1m23.305350349s" podCreationTimestamp="2025-12-01 19:49:43 
+0000 UTC" firstStartedPulling="2025-12-01 19:49:46.188088106 +0000 UTC m=+986.059118030" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:06.294382647 +0000 UTC m=+1066.165412591" watchObservedRunningTime="2025-12-01 19:51:06.305350349 +0000 UTC m=+1066.176380253" Dec 01 19:51:07 crc kubenswrapper[4888]: I1201 19:51:07.265281 4888 generic.go:334] "Generic (PLEG): container finished" podID="fb4dff5c-b796-4b09-acdd-2472c4d22e76" containerID="1aa75780a4b1c0ba06164d7973e7b01ac1e1f5db67213e7f2ebc07e2732ae2d7" exitCode=0 Dec 01 19:51:07 crc kubenswrapper[4888]: I1201 19:51:07.265389 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q-config-7s4bl" event={"ID":"fb4dff5c-b796-4b09-acdd-2472c4d22e76","Type":"ContainerDied","Data":"1aa75780a4b1c0ba06164d7973e7b01ac1e1f5db67213e7f2ebc07e2732ae2d7"} Dec 01 19:51:08 crc kubenswrapper[4888]: I1201 19:51:08.279509 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"5b5549149e41b0e42406c950de445f78025ec6da000d34db6dd1a939d7db2372"} Dec 01 19:51:08 crc kubenswrapper[4888]: I1201 19:51:08.281306 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"de5636cacca75014e8e3fb1a1f8837d62c5013a28de0ebb65a05875e54cf456a"} Dec 01 19:51:08 crc kubenswrapper[4888]: I1201 19:51:08.281381 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"86232ab54361b27e9290496416842809ebc348cfb9178697c7eaed31c94c910e"} Dec 01 19:51:08 crc kubenswrapper[4888]: I1201 19:51:08.281449 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"f2c556945898db893d531e50ae0438feb1ca7a40f66bcc466b24932e695914b6"} Dec 01 19:51:08 crc kubenswrapper[4888]: I1201 19:51:08.771975 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-b4v8q" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.063968 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.258811 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259008 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5c9x\" (UniqueName: \"kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259069 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259095 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259155 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259245 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts\") pod \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\" (UID: \"fb4dff5c-b796-4b09-acdd-2472c4d22e76\") " Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.259960 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.260020 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run" (OuterVolumeSpecName: "var-run") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.260050 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.261107 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.261388 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts" (OuterVolumeSpecName: "scripts") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.268467 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x" (OuterVolumeSpecName: "kube-api-access-b5c9x") pod "fb4dff5c-b796-4b09-acdd-2472c4d22e76" (UID: "fb4dff5c-b796-4b09-acdd-2472c4d22e76"). InnerVolumeSpecName "kube-api-access-b5c9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.305898 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-b4v8q-config-7s4bl" event={"ID":"fb4dff5c-b796-4b09-acdd-2472c4d22e76","Type":"ContainerDied","Data":"19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c"} Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.305962 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19fe06bfdbf5ee9ff0e3c9a594af7746dce9da1573273b7c421611f3fe94494c" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.306052 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-b4v8q-config-7s4bl" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362675 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5c9x\" (UniqueName: \"kubernetes.io/projected/fb4dff5c-b796-4b09-acdd-2472c4d22e76-kube-api-access-b5c9x\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362726 4888 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362738 4888 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362750 4888 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362763 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fb4dff5c-b796-4b09-acdd-2472c4d22e76-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:09 crc kubenswrapper[4888]: I1201 19:51:09.362772 4888 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fb4dff5c-b796-4b09-acdd-2472c4d22e76-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.200307 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-b4v8q-config-7s4bl"] Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.209989 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-b4v8q-config-7s4bl"] Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.327846 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"6478d73101942e9a514ce85e17deb7146e4e9da3554c0f7253a97ea5139f6b1c"} Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.328787 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"91365befcb099e6dac478ca1739a57617417d1155d10bc94b3d44afbc20ba368"} Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.329334 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"16c82f8d5a53a897516a6052eab5b60179f05aabbeb95cf054f06e1e2b13d9fa"} Dec 01 19:51:10 crc kubenswrapper[4888]: I1201 19:51:10.470319 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb4dff5c-b796-4b09-acdd-2472c4d22e76" path="/var/lib/kubelet/pods/fb4dff5c-b796-4b09-acdd-2472c4d22e76/volumes" Dec 01 19:51:11 crc kubenswrapper[4888]: I1201 19:51:11.352620 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"fbf66e7e2dcebe835847ae442b0f1e0223de44dcd0b1d78c5e21fa4c93fe13d1"} Dec 01 19:51:12 crc kubenswrapper[4888]: I1201 19:51:12.365978 4888 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"e8257b8b938f250d623810076a89dadcce73831d07a335dd6e46797e1f697a66"} Dec 01 19:51:12 crc kubenswrapper[4888]: I1201 19:51:12.366472 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"d4e49aebb679e8c2586842ef068f68364e0945e9bbeeffec6102661515ab3359"} Dec 01 19:51:13 crc kubenswrapper[4888]: I1201 19:51:13.384903 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"d748005b1d580f94ea8fb31bc49e97d4a5a0dfa57eb43159c2cad63dfb99f7ca"} Dec 01 19:51:13 crc kubenswrapper[4888]: I1201 19:51:13.385507 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"c5c4f6aac5816b3839d7eaa5abedd18f616a831edfbb62a0dafd8695a4f79dbf"} Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.440530 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"91bb8af8095a0a88c87b7d085c8f8e216cad4081375dd0e44796641675489020"} Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.440623 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"f03692ff4f5abbc4205206906b52878ece1cc1f3b40dd8d2891d7241d462f3ab"} Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.440643 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"33b47f5a-af5b-41b4-9178-a956cd6d2101","Type":"ContainerStarted","Data":"6661ee9a250155a706adc8f8a68d01c881967b06155a96851257a20b4b3e3ff9"} Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.516995 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=37.55509504 podStartE2EDuration="43.516963956s" podCreationTimestamp="2025-12-01 19:50:31 +0000 UTC" firstStartedPulling="2025-12-01 19:51:05.821699873 +0000 UTC m=+1065.692729787" lastFinishedPulling="2025-12-01 19:51:11.783568789 +0000 UTC m=+1071.654598703" observedRunningTime="2025-12-01 19:51:14.510470789 +0000 UTC m=+1074.381500723" watchObservedRunningTime="2025-12-01 19:51:14.516963956 +0000 UTC m=+1074.387993880" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.876584 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:51:14 crc kubenswrapper[4888]: E1201 19:51:14.877217 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb4dff5c-b796-4b09-acdd-2472c4d22e76" containerName="ovn-config" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.877245 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb4dff5c-b796-4b09-acdd-2472c4d22e76" containerName="ovn-config" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.877617 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb4dff5c-b796-4b09-acdd-2472c4d22e76" containerName="ovn-config" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.878969 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.881032 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.889174 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973698 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973777 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973809 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973849 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973931 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:14 crc kubenswrapper[4888]: I1201 19:51:14.973971 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdc22\" (UniqueName: \"kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.075972 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.076064 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: 
\"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.076095 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.076146 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.076214 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.076247 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdc22\" (UniqueName: \"kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.077631 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.077780 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.077890 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.078149 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.078665 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 
19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.100460 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdc22\" (UniqueName: \"kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22\") pod \"dnsmasq-dns-764c5664d7-n5tgx\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.201441 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.358045 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.742089 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-jlw2t"] Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.743114 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.758105 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-jlw2t"] Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.855455 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp664\" (UniqueName: \"kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.855623 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.865642 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-cd73-account-create-update-qrpvz"] Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.867039 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.870231 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 01 19:51:15 crc kubenswrapper[4888]: I1201 19:51:15.889584 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cd73-account-create-update-qrpvz"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.957378 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp664\" (UniqueName: \"kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.957523 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjqt2\" (UniqueName: \"kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.957569 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.957785 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.958889 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:15.980353 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp664\" (UniqueName: \"kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664\") pod \"cinder-db-create-jlw2t\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.054532 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-mnpjt"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.055748 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.060170 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjqt2\" (UniqueName: \"kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.060229 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.060982 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.071372 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mnpjt"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.083912 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjqt2\" (UniqueName: \"kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2\") pod \"cinder-cd73-account-create-update-qrpvz\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.175592 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-2dggz"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.176104 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x69nr\" (UniqueName: \"kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.176220 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.177223 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.232163 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2dggz"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.266298 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-e18c-account-create-update-pvcwx"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.268667 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.294278 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.294619 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.298859 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300157 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x69nr\" (UniqueName: \"kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300297 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4rn4\" (UniqueName: \"kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300333 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300379 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300412 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.300462 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chvmm\" (UniqueName: \"kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.303268 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.376197 4888 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e18c-account-create-update-pvcwx"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.378513 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x69nr\" (UniqueName: \"kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr\") pod \"barbican-db-create-mnpjt\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.403639 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4rn4\" (UniqueName: \"kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.403718 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.403748 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.403792 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chvmm\" (UniqueName: \"kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.405042 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.405689 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.443582 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chvmm\" (UniqueName: \"kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm\") pod \"barbican-e18c-account-create-update-pvcwx\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.447855 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4rn4\" (UniqueName: 
\"kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4\") pod \"neutron-db-create-2dggz\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.534934 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.535987 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-97e5-account-create-update-4qnck"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.538045 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.542046 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.555082 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-97e5-account-create-update-4qnck"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.563550 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-2l5hv"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.564248 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.571899 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.580385 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2l5hv"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.589927 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.590130 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.590256 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-skcf5" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.590561 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.629986 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.645046 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.770459 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.770506 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d264g\" (UniqueName: \"kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.770550 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.770573 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.770638 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs58x\" (UniqueName: \"kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.873098 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs58x\" (UniqueName: \"kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.873210 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.873257 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d264g\" (UniqueName: \"kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " 
pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.873321 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.873356 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.876156 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.879999 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.887567 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.906383 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d264g\" (UniqueName: \"kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g\") pod \"keystone-db-sync-2l5hv\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:16 crc kubenswrapper[4888]: I1201 19:51:16.930949 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs58x\" (UniqueName: \"kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x\") pod \"neutron-97e5-account-create-update-4qnck\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.190023 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.205474 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.309344 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-cd73-account-create-update-qrpvz"] Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.478053 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" event={"ID":"605da06d-22a2-423c-8acd-7fd22fcbc0b8","Type":"ContainerStarted","Data":"863b1f3f1627a2e6389b26e0940c36d9045f396bd268304c24197db52a8aeb81"} Dec 01 19:51:17 crc kubenswrapper[4888]: W1201 19:51:17.535832 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b WatchSource:0}: Error finding container 49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b: Status 404 returned error can't find the container with id 49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.568259 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-jlw2t"] Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.651781 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-mnpjt"] Dec 01 19:51:17 crc kubenswrapper[4888]: W1201 19:51:17.701585 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a350005_42be_4ad0_8996_3ac3a5808a79.slice/crio-9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e WatchSource:0}: Error finding container 9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e: Status 404 returned error can't find the container with id 9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.704818 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2dggz"] Dec 01 19:51:17 crc kubenswrapper[4888]: W1201 19:51:17.722764 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67be08f6_80ec_4816_8670_40bd598ac820.slice/crio-d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a WatchSource:0}: Error finding container d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a: Status 404 returned error can't find the container with id d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a Dec 01 19:51:17 crc kubenswrapper[4888]: I1201 19:51:17.823164 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-97e5-account-create-update-4qnck"] Dec 01 19:51:17 crc kubenswrapper[4888]: W1201 19:51:17.843064 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57fd71b5_5871_4427_9c7f_3c0c7b9fa47a.slice/crio-d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742 WatchSource:0}: Error finding container d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742: Status 404 returned error can't find the container with id d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742 Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.000398 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-e18c-account-create-update-pvcwx"] Dec 01 
Dec 01 19:51:18 crc kubenswrapper[4888]: W1201 19:51:18.135915 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod843bf3c4_e095_466a_b4f3_5f48b85dc179.slice/crio-11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449 WatchSource:0}: Error finding container 11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449: Status 404 returned error can't find the container with id 11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.138390 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2l5hv"]
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.502245 4888 generic.go:334] "Generic (PLEG): container finished" podID="7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" containerID="ac2c12e010d8904e43ed20af8c8b8879f366820cb82bb208beb41726d26bdd66" exitCode=0
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.502373 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e18c-account-create-update-pvcwx" event={"ID":"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933","Type":"ContainerDied","Data":"ac2c12e010d8904e43ed20af8c8b8879f366820cb82bb208beb41726d26bdd66"}
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.502747 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e18c-account-create-update-pvcwx" event={"ID":"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933","Type":"ContainerStarted","Data":"b47e8da3a45d3c90038759e9275f625c6b94af2ea33602bc415f9897e1948674"}
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.503967 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2l5hv" event={"ID":"843bf3c4-e095-466a-b4f3-5f48b85dc179","Type":"ContainerStarted","Data":"11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449"}
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.505553 4888 generic.go:334] "Generic (PLEG): container finished" podID="57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" containerID="accb72ab357e4e7848bb0fcfae7958c63777bce22ec2d68469d45283c4c17a5d" exitCode=0
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.505617 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97e5-account-create-update-4qnck" event={"ID":"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a","Type":"ContainerDied","Data":"accb72ab357e4e7848bb0fcfae7958c63777bce22ec2d68469d45283c4c17a5d"}
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.505639 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97e5-account-create-update-4qnck" event={"ID":"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a","Type":"ContainerStarted","Data":"d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742"}
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.508229 4888 generic.go:334] "Generic (PLEG): container finished" podID="1a350005-42be-4ad0-8996-3ac3a5808a79" containerID="e5a864fdabd4c395f3d4230c08f3326ac1f41f695be82a7f9ecbc2d15ca4aea1" exitCode=0
Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.508301 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mnpjt" event={"ID":"1a350005-42be-4ad0-8996-3ac3a5808a79","Type":"ContainerDied","Data":"e5a864fdabd4c395f3d4230c08f3326ac1f41f695be82a7f9ecbc2d15ca4aea1"}
event={"ID":"1a350005-42be-4ad0-8996-3ac3a5808a79","Type":"ContainerStarted","Data":"9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.510365 4888 generic.go:334] "Generic (PLEG): container finished" podID="7f794204-9db5-498d-b8a5-586ec3b9f921" containerID="61d5e9e2419743db0065411b117b4b013b8e88201d7f82768646ee2c97dc6fb6" exitCode=0 Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.510516 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd73-account-create-update-qrpvz" event={"ID":"7f794204-9db5-498d-b8a5-586ec3b9f921","Type":"ContainerDied","Data":"61d5e9e2419743db0065411b117b4b013b8e88201d7f82768646ee2c97dc6fb6"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.510543 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd73-account-create-update-qrpvz" event={"ID":"7f794204-9db5-498d-b8a5-586ec3b9f921","Type":"ContainerStarted","Data":"49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.512350 4888 generic.go:334] "Generic (PLEG): container finished" podID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerID="bfdbe00013de156dba963ac18f2d58e39f5c3f2308f2e8d00b43f8606980ebbd" exitCode=0 Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.512398 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" event={"ID":"605da06d-22a2-423c-8acd-7fd22fcbc0b8","Type":"ContainerDied","Data":"bfdbe00013de156dba963ac18f2d58e39f5c3f2308f2e8d00b43f8606980ebbd"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.520566 4888 generic.go:334] "Generic (PLEG): container finished" podID="67be08f6-80ec-4816-8670-40bd598ac820" containerID="381b9f5fcda2723e8eadc8faa24fe3a22785c1e9654d452b34e61c8817b8deda" exitCode=0 Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.520647 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2dggz" event={"ID":"67be08f6-80ec-4816-8670-40bd598ac820","Type":"ContainerDied","Data":"381b9f5fcda2723e8eadc8faa24fe3a22785c1e9654d452b34e61c8817b8deda"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.520684 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2dggz" event={"ID":"67be08f6-80ec-4816-8670-40bd598ac820","Type":"ContainerStarted","Data":"d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.526474 4888 generic.go:334] "Generic (PLEG): container finished" podID="37be0244-881b-469e-9dd3-5f9d5b38f042" containerID="67dc3451df3b27852e504d44702309c8e4676e6f7b8ad029fa8d57c8c216ae88" exitCode=0 Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.526564 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jlw2t" event={"ID":"37be0244-881b-469e-9dd3-5f9d5b38f042","Type":"ContainerDied","Data":"67dc3451df3b27852e504d44702309c8e4676e6f7b8ad029fa8d57c8c216ae88"} Dec 01 19:51:18 crc kubenswrapper[4888]: I1201 19:51:18.526612 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jlw2t" event={"ID":"37be0244-881b-469e-9dd3-5f9d5b38f042","Type":"ContainerStarted","Data":"0ab39337ece614d5690e02e28c18adf3ef81107f7a42b579733e189f792bf4b9"} Dec 01 19:51:19 crc kubenswrapper[4888]: I1201 19:51:19.546744 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" 
event={"ID":"605da06d-22a2-423c-8acd-7fd22fcbc0b8","Type":"ContainerStarted","Data":"2c5380c2edff747804a73caa67786e13477916226ba82d01b99d7a8beed8f670"} Dec 01 19:51:19 crc kubenswrapper[4888]: I1201 19:51:19.574100 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podStartSLOduration=5.57408335 podStartE2EDuration="5.57408335s" podCreationTimestamp="2025-12-01 19:51:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:19.566808253 +0000 UTC m=+1079.437838187" watchObservedRunningTime="2025-12-01 19:51:19.57408335 +0000 UTC m=+1079.445113264" Dec 01 19:51:19 crc kubenswrapper[4888]: I1201 19:51:19.978215 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.014670 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts\") pod \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.014792 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs58x\" (UniqueName: \"kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x\") pod \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\" (UID: \"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.016034 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" (UID: "57fd71b5-5871-4427-9c7f-3c0c7b9fa47a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.023352 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x" (OuterVolumeSpecName: "kube-api-access-bs58x") pod "57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" (UID: "57fd71b5-5871-4427-9c7f-3c0c7b9fa47a"). InnerVolumeSpecName "kube-api-access-bs58x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.037588 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.037993 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.116228 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.116265 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs58x\" (UniqueName: \"kubernetes.io/projected/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a-kube-api-access-bs58x\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.156814 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.163085 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.172139 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.188000 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.202541 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.206577 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321487 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chvmm\" (UniqueName: \"kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm\") pod \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321644 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4rn4\" (UniqueName: \"kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4\") pod \"67be08f6-80ec-4816-8670-40bd598ac820\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321697 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts\") pod \"37be0244-881b-469e-9dd3-5f9d5b38f042\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321743 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x69nr\" (UniqueName: \"kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr\") pod \"1a350005-42be-4ad0-8996-3ac3a5808a79\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321787 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp664\" (UniqueName: \"kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664\") pod \"37be0244-881b-469e-9dd3-5f9d5b38f042\" (UID: \"37be0244-881b-469e-9dd3-5f9d5b38f042\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321872 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjqt2\" (UniqueName: \"kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2\") pod \"7f794204-9db5-498d-b8a5-586ec3b9f921\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321936 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts\") pod \"1a350005-42be-4ad0-8996-3ac3a5808a79\" (UID: \"1a350005-42be-4ad0-8996-3ac3a5808a79\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321965 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts\") pod \"67be08f6-80ec-4816-8670-40bd598ac820\" (UID: \"67be08f6-80ec-4816-8670-40bd598ac820\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.321993 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts\") pod \"7f794204-9db5-498d-b8a5-586ec3b9f921\" (UID: \"7f794204-9db5-498d-b8a5-586ec3b9f921\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322071 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts\") pod \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\" (UID: \"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933\") " Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322390 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "37be0244-881b-469e-9dd3-5f9d5b38f042" (UID: "37be0244-881b-469e-9dd3-5f9d5b38f042"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322768 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "67be08f6-80ec-4816-8670-40bd598ac820" (UID: "67be08f6-80ec-4816-8670-40bd598ac820"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322808 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a350005-42be-4ad0-8996-3ac3a5808a79" (UID: "1a350005-42be-4ad0-8996-3ac3a5808a79"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322925 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67be08f6-80ec-4816-8670-40bd598ac820-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322942 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/37be0244-881b-469e-9dd3-5f9d5b38f042-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.322932 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" (UID: "7a21e9bd-e339-4d46-aaed-1ed1d4cfe933"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.323566 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7f794204-9db5-498d-b8a5-586ec3b9f921" (UID: "7f794204-9db5-498d-b8a5-586ec3b9f921"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.328828 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr" (OuterVolumeSpecName: "kube-api-access-x69nr") pod "1a350005-42be-4ad0-8996-3ac3a5808a79" (UID: "1a350005-42be-4ad0-8996-3ac3a5808a79"). InnerVolumeSpecName "kube-api-access-x69nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.329008 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4" (OuterVolumeSpecName: "kube-api-access-n4rn4") pod "67be08f6-80ec-4816-8670-40bd598ac820" (UID: "67be08f6-80ec-4816-8670-40bd598ac820"). InnerVolumeSpecName "kube-api-access-n4rn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.329056 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2" (OuterVolumeSpecName: "kube-api-access-jjqt2") pod "7f794204-9db5-498d-b8a5-586ec3b9f921" (UID: "7f794204-9db5-498d-b8a5-586ec3b9f921"). InnerVolumeSpecName "kube-api-access-jjqt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.329106 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm" (OuterVolumeSpecName: "kube-api-access-chvmm") pod "7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" (UID: "7a21e9bd-e339-4d46-aaed-1ed1d4cfe933"). InnerVolumeSpecName "kube-api-access-chvmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.333428 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664" (OuterVolumeSpecName: "kube-api-access-rp664") pod "37be0244-881b-469e-9dd3-5f9d5b38f042" (UID: "37be0244-881b-469e-9dd3-5f9d5b38f042"). InnerVolumeSpecName "kube-api-access-rp664". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426259 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chvmm\" (UniqueName: \"kubernetes.io/projected/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-kube-api-access-chvmm\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426296 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4rn4\" (UniqueName: \"kubernetes.io/projected/67be08f6-80ec-4816-8670-40bd598ac820-kube-api-access-n4rn4\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426311 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x69nr\" (UniqueName: \"kubernetes.io/projected/1a350005-42be-4ad0-8996-3ac3a5808a79-kube-api-access-x69nr\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426323 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp664\" (UniqueName: \"kubernetes.io/projected/37be0244-881b-469e-9dd3-5f9d5b38f042-kube-api-access-rp664\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426333 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjqt2\" (UniqueName: \"kubernetes.io/projected/7f794204-9db5-498d-b8a5-586ec3b9f921-kube-api-access-jjqt2\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426342 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a350005-42be-4ad0-8996-3ac3a5808a79-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426351 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7f794204-9db5-498d-b8a5-586ec3b9f921-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.426361 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.566110 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-jlw2t" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.566170 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-jlw2t" event={"ID":"37be0244-881b-469e-9dd3-5f9d5b38f042","Type":"ContainerDied","Data":"0ab39337ece614d5690e02e28c18adf3ef81107f7a42b579733e189f792bf4b9"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.566758 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ab39337ece614d5690e02e28c18adf3ef81107f7a42b579733e189f792bf4b9" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.569698 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-e18c-account-create-update-pvcwx" event={"ID":"7a21e9bd-e339-4d46-aaed-1ed1d4cfe933","Type":"ContainerDied","Data":"b47e8da3a45d3c90038759e9275f625c6b94af2ea33602bc415f9897e1948674"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.569780 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-e18c-account-create-update-pvcwx" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.569861 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b47e8da3a45d3c90038759e9275f625c6b94af2ea33602bc415f9897e1948674" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.571843 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-97e5-account-create-update-4qnck" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.571911 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-97e5-account-create-update-4qnck" event={"ID":"57fd71b5-5871-4427-9c7f-3c0c7b9fa47a","Type":"ContainerDied","Data":"d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.572711 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3d3daa2cf17e16a0522e43ea765fc0573139bbc1b3cf2a39510ddc24f3b8742" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.578114 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-mnpjt" event={"ID":"1a350005-42be-4ad0-8996-3ac3a5808a79","Type":"ContainerDied","Data":"9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.578165 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9118e06b98c761e0f5406bb65dfc5a1bddbba6e047ed6e156d1135363d56ef5e" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.578352 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-mnpjt" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.581952 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-cd73-account-create-update-qrpvz" event={"ID":"7f794204-9db5-498d-b8a5-586ec3b9f921","Type":"ContainerDied","Data":"49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.582013 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.582706 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-cd73-account-create-update-qrpvz" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.584358 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vcl88" event={"ID":"b85ed1be-1f93-4247-ae07-5c08ecbb6802","Type":"ContainerStarted","Data":"707dce5fdcba6fbb943150e7273574bbd155231f2307395a44e8268309d81a79"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.596231 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2dggz" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.596686 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2dggz" event={"ID":"67be08f6-80ec-4816-8670-40bd598ac820","Type":"ContainerDied","Data":"d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a"} Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.596731 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d95d930099ef6d6f4e9fa41d4c4195dc530524babc590b8d0444de2a2249024a" Dec 01 19:51:20 crc kubenswrapper[4888]: I1201 19:51:20.628016 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-vcl88" podStartSLOduration=3.047371985 podStartE2EDuration="36.62799072s" podCreationTimestamp="2025-12-01 19:50:44 +0000 UTC" firstStartedPulling="2025-12-01 19:50:45.647708459 +0000 UTC m=+1045.518738373" lastFinishedPulling="2025-12-01 19:51:19.228327194 +0000 UTC m=+1079.099357108" observedRunningTime="2025-12-01 19:51:20.615886649 +0000 UTC m=+1080.486916563" watchObservedRunningTime="2025-12-01 19:51:20.62799072 +0000 UTC m=+1080.499020634" Dec 01 19:51:24 crc kubenswrapper[4888]: I1201 19:51:24.804815 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2l5hv" event={"ID":"843bf3c4-e095-466a-b4f3-5f48b85dc179","Type":"ContainerStarted","Data":"66e3f4fbc0dd1e564ab2c0d652366503e8cf58d4b765568a9ba240822941543a"} Dec 01 19:51:24 crc kubenswrapper[4888]: I1201 19:51:24.840562 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-2l5hv" podStartSLOduration=3.098039891 podStartE2EDuration="8.840529691s" podCreationTimestamp="2025-12-01 19:51:16 +0000 UTC" firstStartedPulling="2025-12-01 19:51:18.138612712 +0000 UTC m=+1078.009642626" lastFinishedPulling="2025-12-01 19:51:23.881102512 +0000 UTC m=+1083.752132426" observedRunningTime="2025-12-01 19:51:24.836378705 +0000 UTC m=+1084.707408609" watchObservedRunningTime="2025-12-01 19:51:24.840529691 +0000 UTC m=+1084.711559615" Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.202531 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:51:25 crc kubenswrapper[4888]: E1201 19:51:25.240513 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]" Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.300539 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.300905 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-g8fsv" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="dnsmasq-dns" containerID="cri-o://bf07f4a5bd735f3e794efa252bb5f438c65b24df2816ae05849a365ea9ae7435" gracePeriod=10 Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.822366 4888 generic.go:334] "Generic (PLEG): container finished" podID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerID="bf07f4a5bd735f3e794efa252bb5f438c65b24df2816ae05849a365ea9ae7435" exitCode=0 Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.823083 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-g8fsv" event={"ID":"b08bdea6-ae8a-4625-81ed-709c3cd10106","Type":"ContainerDied","Data":"bf07f4a5bd735f3e794efa252bb5f438c65b24df2816ae05849a365ea9ae7435"}
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.823150 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-g8fsv" event={"ID":"b08bdea6-ae8a-4625-81ed-709c3cd10106","Type":"ContainerDied","Data":"9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d"}
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.823171 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f5da2259574f50ce5ae9d8cfc20f10d2555318a0c126fefbcae119e4b37b45d"
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.892225 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-g8fsv"
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.953689 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-dns-svc\") pod \"b08bdea6-ae8a-4625-81ed-709c3cd10106\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") "
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.953831 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb\") pod \"b08bdea6-ae8a-4625-81ed-709c3cd10106\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") "
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.953972 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config\") pod \"b08bdea6-ae8a-4625-81ed-709c3cd10106\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") "
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.954111 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfm79\" (UniqueName: \"kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79\") pod \"b08bdea6-ae8a-4625-81ed-709c3cd10106\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") "
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.954145 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb\") pod \"b08bdea6-ae8a-4625-81ed-709c3cd10106\" (UID: \"b08bdea6-ae8a-4625-81ed-709c3cd10106\") "
Dec 01 19:51:25 crc kubenswrapper[4888]: I1201 19:51:25.979817 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79" (OuterVolumeSpecName: "kube-api-access-mfm79") pod "b08bdea6-ae8a-4625-81ed-709c3cd10106" (UID: "b08bdea6-ae8a-4625-81ed-709c3cd10106"). InnerVolumeSpecName "kube-api-access-mfm79". PluginName "kubernetes.io/projected", VolumeGidValue ""
InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.038286 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b08bdea6-ae8a-4625-81ed-709c3cd10106" (UID: "b08bdea6-ae8a-4625-81ed-709c3cd10106"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.056181 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.056248 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.056265 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfm79\" (UniqueName: \"kubernetes.io/projected/b08bdea6-ae8a-4625-81ed-709c3cd10106-kube-api-access-mfm79\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.056680 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b08bdea6-ae8a-4625-81ed-709c3cd10106" (UID: "b08bdea6-ae8a-4625-81ed-709c3cd10106"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.059689 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config" (OuterVolumeSpecName: "config") pod "b08bdea6-ae8a-4625-81ed-709c3cd10106" (UID: "b08bdea6-ae8a-4625-81ed-709c3cd10106"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.157821 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.157888 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b08bdea6-ae8a-4625-81ed-709c3cd10106-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.833373 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-g8fsv" Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.856849 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:51:26 crc kubenswrapper[4888]: I1201 19:51:26.865663 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-g8fsv"] Dec 01 19:51:27 crc kubenswrapper[4888]: I1201 19:51:27.846826 4888 generic.go:334] "Generic (PLEG): container finished" podID="843bf3c4-e095-466a-b4f3-5f48b85dc179" containerID="66e3f4fbc0dd1e564ab2c0d652366503e8cf58d4b765568a9ba240822941543a" exitCode=0 Dec 01 19:51:27 crc kubenswrapper[4888]: I1201 19:51:27.846892 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2l5hv" event={"ID":"843bf3c4-e095-466a-b4f3-5f48b85dc179","Type":"ContainerDied","Data":"66e3f4fbc0dd1e564ab2c0d652366503e8cf58d4b765568a9ba240822941543a"} Dec 01 19:51:28 crc kubenswrapper[4888]: I1201 19:51:28.464371 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" path="/var/lib/kubelet/pods/b08bdea6-ae8a-4625-81ed-709c3cd10106/volumes" Dec 01 19:51:28 crc kubenswrapper[4888]: I1201 19:51:28.857955 4888 generic.go:334] "Generic (PLEG): container finished" podID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" containerID="707dce5fdcba6fbb943150e7273574bbd155231f2307395a44e8268309d81a79" exitCode=0 Dec 01 19:51:28 crc kubenswrapper[4888]: I1201 19:51:28.858070 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vcl88" event={"ID":"b85ed1be-1f93-4247-ae07-5c08ecbb6802","Type":"ContainerDied","Data":"707dce5fdcba6fbb943150e7273574bbd155231f2307395a44e8268309d81a79"} Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.316856 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vcl88" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.458788 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data\") pod \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.458923 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmb6c\" (UniqueName: \"kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c\") pod \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.458957 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data\") pod \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.459144 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle\") pod \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\" (UID: \"b85ed1be-1f93-4247-ae07-5c08ecbb6802\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.467000 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b85ed1be-1f93-4247-ae07-5c08ecbb6802" (UID: "b85ed1be-1f93-4247-ae07-5c08ecbb6802"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.470465 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c" (OuterVolumeSpecName: "kube-api-access-cmb6c") pod "b85ed1be-1f93-4247-ae07-5c08ecbb6802" (UID: "b85ed1be-1f93-4247-ae07-5c08ecbb6802"). InnerVolumeSpecName "kube-api-access-cmb6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.495505 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b85ed1be-1f93-4247-ae07-5c08ecbb6802" (UID: "b85ed1be-1f93-4247-ae07-5c08ecbb6802"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.526295 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data" (OuterVolumeSpecName: "config-data") pod "b85ed1be-1f93-4247-ae07-5c08ecbb6802" (UID: "b85ed1be-1f93-4247-ae07-5c08ecbb6802"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.563248 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.563304 4888 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.563321 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmb6c\" (UniqueName: \"kubernetes.io/projected/b85ed1be-1f93-4247-ae07-5c08ecbb6802-kube-api-access-cmb6c\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.563335 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b85ed1be-1f93-4247-ae07-5c08ecbb6802-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.783272 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.884496 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2l5hv" event={"ID":"843bf3c4-e095-466a-b4f3-5f48b85dc179","Type":"ContainerDied","Data":"11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449"} Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.884558 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11864904d7c17ff1974ec7da569fd035778f4580d70d31f546f8686856901449" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.884557 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2l5hv" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.889536 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-vcl88" event={"ID":"b85ed1be-1f93-4247-ae07-5c08ecbb6802","Type":"ContainerDied","Data":"1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535"} Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.889600 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ebff9ebd4d752516d4a5253c96ead9307842b796420880b9b24ce185997e535" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.889613 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-vcl88" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.969448 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data\") pod \"843bf3c4-e095-466a-b4f3-5f48b85dc179\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.969499 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle\") pod \"843bf3c4-e095-466a-b4f3-5f48b85dc179\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.969610 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d264g\" (UniqueName: \"kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g\") pod \"843bf3c4-e095-466a-b4f3-5f48b85dc179\" (UID: \"843bf3c4-e095-466a-b4f3-5f48b85dc179\") " Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.978050 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g" (OuterVolumeSpecName: "kube-api-access-d264g") pod "843bf3c4-e095-466a-b4f3-5f48b85dc179" (UID: "843bf3c4-e095-466a-b4f3-5f48b85dc179"). InnerVolumeSpecName "kube-api-access-d264g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:30 crc kubenswrapper[4888]: I1201 19:51:30.999382 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "843bf3c4-e095-466a-b4f3-5f48b85dc179" (UID: "843bf3c4-e095-466a-b4f3-5f48b85dc179"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.023970 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data" (OuterVolumeSpecName: "config-data") pod "843bf3c4-e095-466a-b4f3-5f48b85dc179" (UID: "843bf3c4-e095-466a-b4f3-5f48b85dc179"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.072485 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.072526 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d264g\" (UniqueName: \"kubernetes.io/projected/843bf3c4-e095-466a-b4f3-5f48b85dc179-kube-api-access-d264g\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.072540 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/843bf3c4-e095-466a-b4f3-5f48b85dc179-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.369537 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370140 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" containerName="glance-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370161 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" containerName="glance-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370223 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="dnsmasq-dns" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370233 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="dnsmasq-dns" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370244 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="init" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370252 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="init" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370264 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a350005-42be-4ad0-8996-3ac3a5808a79" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370272 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a350005-42be-4ad0-8996-3ac3a5808a79" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370284 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f794204-9db5-498d-b8a5-586ec3b9f921" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370291 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f794204-9db5-498d-b8a5-586ec3b9f921" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370302 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67be08f6-80ec-4816-8670-40bd598ac820" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370310 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="67be08f6-80ec-4816-8670-40bd598ac820" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370329 4888 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="37be0244-881b-469e-9dd3-5f9d5b38f042" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370338 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="37be0244-881b-469e-9dd3-5f9d5b38f042" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370351 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370360 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370387 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370395 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: E1201 19:51:31.370414 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="843bf3c4-e095-466a-b4f3-5f48b85dc179" containerName="keystone-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370423 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="843bf3c4-e095-466a-b4f3-5f48b85dc179" containerName="keystone-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370649 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="67be08f6-80ec-4816-8670-40bd598ac820" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370670 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="843bf3c4-e095-466a-b4f3-5f48b85dc179" containerName="keystone-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370682 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b08bdea6-ae8a-4625-81ed-709c3cd10106" containerName="dnsmasq-dns" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370694 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370706 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f794204-9db5-498d-b8a5-586ec3b9f921" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370719 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" containerName="mariadb-account-create-update" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370731 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" containerName="glance-db-sync" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370749 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a350005-42be-4ad0-8996-3ac3a5808a79" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.370760 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="37be0244-881b-469e-9dd3-5f9d5b38f042" containerName="mariadb-database-create" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.380886 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.386669 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.479328 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.479898 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.479945 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.479980 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.480011 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.480149 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w58ww\" (UniqueName: \"kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582381 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582453 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582502 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582542 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582597 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w58ww\" (UniqueName: \"kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.582672 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.584948 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.585088 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.585314 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.585347 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.585341 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.619132 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w58ww\" (UniqueName: 
\"kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww\") pod \"dnsmasq-dns-74f6bcbc87-p5g9j\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:31 crc kubenswrapper[4888]: I1201 19:51:31.710257 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.121738 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-8692b"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.123630 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.135605 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.135818 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.135942 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.134480 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8692b"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.153746 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.160815 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-skcf5" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.187303 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.237177 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-r7br8"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.239211 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.269721 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-r7br8"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.320119 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.320592 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hg22\" (UniqueName: \"kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.320742 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.320903 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.321075 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.321296 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.361973 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.370300 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.376636 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-7v4xh" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.381305 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.382485 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.383929 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.388709 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.423893 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.423988 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424018 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424062 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hg22\" (UniqueName: \"kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424098 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424120 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424172 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: 
\"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424215 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424244 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424280 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnfxl\" (UniqueName: \"kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424301 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.424334 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.442419 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.443351 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.443545 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.444747 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 
19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.465931 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.499090 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hg22\" (UniqueName: \"kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22\") pod \"keystone-bootstrap-8692b\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.516873 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.524285 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-xk69h"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526080 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526124 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnfxl\" (UniqueName: \"kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526153 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526227 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526264 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526279 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526299 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526316 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c8z4\" (UniqueName: \"kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526345 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526361 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.526389 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.527329 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.527411 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.532048 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.532854 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.537362 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.540321 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.551005 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-5fpcd" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.551364 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.572603 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xk69h"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.599405 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-r7br8"] Dec 01 19:51:32 crc kubenswrapper[4888]: E1201 19:51:32.602536 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-lnfxl], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-847c4cc679-r7br8" podUID="d5400d50-b319-48e5-8ec0-13982bb79160" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.629410 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-tnc9l"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.630932 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.633336 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.634461 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.634579 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btz6w\" (UniqueName: \"kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.634682 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.634770 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnfxl\" (UniqueName: \"kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl\") pod \"dnsmasq-dns-847c4cc679-r7br8\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.634914 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.635002 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.635091 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.635163 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c8z4\" (UniqueName: \"kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.635275 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.636216 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.636422 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.637380 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.641956 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-dkpkt" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.642399 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.670739 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.676481 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-8dw7s"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.678503 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.689862 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.690510 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2j785" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.697101 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c8z4\" (UniqueName: \"kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4\") pod \"horizon-7d745c59d5-8h7kr\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.697183 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-tnc9l"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.697545 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.708465 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737535 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737615 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btz6w\" (UniqueName: \"kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737660 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737714 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737751 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737789 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737887 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.737987 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.738021 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbf64\" (UniqueName: \"kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " 
pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.749413 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.754590 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.777527 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-m7g88"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.780424 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.784116 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btz6w\" (UniqueName: \"kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w\") pod \"barbican-db-sync-xk69h\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.785098 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.790856 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-ggfk2" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.791021 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.801335 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.805174 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8dw7s"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.842966 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph9l2\" (UniqueName: \"kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843105 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843285 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 
19:51:32.843322 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843456 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843500 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbf64\" (UniqueName: \"kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843704 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843779 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843865 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.843973 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.844048 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.848422 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.848476 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc 
kubenswrapper[4888]: I1201 19:51:32.850754 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.856333 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.863091 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.863621 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.866591 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.885997 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbf64\" (UniqueName: \"kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64\") pod \"cinder-db-sync-tnc9l\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.947952 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948044 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948076 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948128 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w9mm\" (UniqueName: \"kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc 
kubenswrapper[4888]: I1201 19:51:32.948186 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948243 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948301 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948338 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948389 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948426 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgccl\" (UniqueName: \"kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948477 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948508 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.948586 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph9l2\" (UniqueName: \"kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc 
kubenswrapper[4888]: I1201 19:51:32.948644 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.950680 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.954549 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xk69h" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.956129 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m7g88"] Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.973813 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.977021 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.988258 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.993556 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" event={"ID":"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb","Type":"ContainerStarted","Data":"2f50efb7cc6927fc35ca72d97f1aa81851b8dffaf70a575eb75716b8d10a9335"} Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.993867 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:32 crc kubenswrapper[4888]: I1201 19:51:32.993949 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.012608 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph9l2\" (UniqueName: \"kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2\") pod \"placement-db-sync-8dw7s\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") " pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.031043 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.035826 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-8dw7s" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.036396 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.050903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.050978 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgccl\" (UniqueName: \"kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051035 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051064 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051150 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051172 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051220 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w9mm\" (UniqueName: \"kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051254 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.051293 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb\") pod 
\"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.052512 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.053331 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.054249 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.054358 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.054909 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.062040 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.064336 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.071439 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.074471 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.086281 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.117954 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w9mm\" (UniqueName: \"kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm\") pod \"dnsmasq-dns-785d8bcb8c-67pd6\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.158148 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgccl\" (UniqueName: \"kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl\") pod \"neutron-db-sync-m7g88\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.169240 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.187858 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.197512 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.197886 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.209995 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.263919 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0\") pod \"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.263999 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb\") pod \"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264039 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc\") pod \"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264089 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb\") pod \"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264181 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnfxl\" (UniqueName: \"kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl\") pod 
\"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264237 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config\") pod \"d5400d50-b319-48e5-8ec0-13982bb79160\" (UID: \"d5400d50-b319-48e5-8ec0-13982bb79160\") " Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264542 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264571 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7svrf\" (UniqueName: \"kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264612 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264641 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.264732 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.266869 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config" (OuterVolumeSpecName: "config") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.267359 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.267759 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.269572 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.277663 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.281343 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.283439 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.319589 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl" (OuterVolumeSpecName: "kube-api-access-lnfxl") pod "d5400d50-b319-48e5-8ec0-13982bb79160" (UID: "d5400d50-b319-48e5-8ec0-13982bb79160"). InnerVolumeSpecName "kube-api-access-lnfxl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.320115 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.320320 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jllkx" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.320460 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.330084 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366371 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366455 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366487 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366504 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366543 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366572 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366597 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-run-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366614 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7svrf\" (UniqueName: 
\"kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366644 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366667 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366707 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbkzn\" (UniqueName: \"kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366728 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366769 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366780 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366789 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366798 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366808 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnfxl\" (UniqueName: \"kubernetes.io/projected/d5400d50-b319-48e5-8ec0-13982bb79160-kube-api-access-lnfxl\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.366820 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d5400d50-b319-48e5-8ec0-13982bb79160-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.382477 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.383232 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.383740 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.395044 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.419103 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7svrf\" (UniqueName: \"kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf\") pod \"horizon-7f76bc979c-6tnbl\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.420048 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.440896 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-m7g88" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468284 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468848 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468911 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbkzn\" (UniqueName: \"kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468939 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468963 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.468989 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469018 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469047 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469078 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc 
kubenswrapper[4888]: I1201 19:51:33.469102 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469125 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469181 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469232 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdg2f\" (UniqueName: \"kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469253 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-run-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.469972 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-run-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.475852 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.479199 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.499109 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.499287 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.499967 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbkzn\" (UniqueName: 
\"kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.503369 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.512983 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data\") pod \"ceilometer-0\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.515175 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571561 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571653 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571802 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdg2f\" (UniqueName: \"kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571872 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571896 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571949 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.571990 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.572597 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.572879 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.574384 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.599800 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.608537 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.618402 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.622997 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.630870 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.642169 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.649255 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.651712 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.654413 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.661058 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdg2f\" (UniqueName: \"kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f\") pod \"glance-default-external-api-0\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.689741 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.776847 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.776921 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.777259 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.777387 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.777465 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.777541 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.777589 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.888631 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.888762 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.888832 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.888895 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.888957 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.889233 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.889290 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.889389 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.890464 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.890714 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.895362 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.902348 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.904713 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.913264 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.938651 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:33 crc kubenswrapper[4888]: I1201 19:51:33.993865 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8692b"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.032297 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-r7br8" Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.036525 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerStarted","Data":"45212ede3d182e371b32064366b11462a16cd097822fde25c8725d0ca263f562"} Dec 01 19:51:34 crc kubenswrapper[4888]: W1201 19:51:34.042682 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ee1004c_9319_4275_9123_49e393196c06.slice/crio-9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1 WatchSource:0}: Error finding container 9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1: Status 404 returned error can't find the container with id 9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1 Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.046440 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.120303 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-r7br8"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.156416 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-r7br8"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.188357 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xk69h"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.214901 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-8dw7s"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.239232 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-tnc9l"] Dec 01 19:51:34 crc kubenswrapper[4888]: W1201 19:51:34.388959 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice/crio-e1c7e993c0b297f9fc9735b13f03274dc72873246ef446a20717ead68795580e WatchSource:0}: Error finding container e1c7e993c0b297f9fc9735b13f03274dc72873246ef446a20717ead68795580e: Status 404 returned error can't find the container with id e1c7e993c0b297f9fc9735b13f03274dc72873246ef446a20717ead68795580e Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.394462 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.406983 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-m7g88"] Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.512652 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5400d50-b319-48e5-8ec0-13982bb79160" path="/var/lib/kubelet/pods/d5400d50-b319-48e5-8ec0-13982bb79160/volumes" Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.610806 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:51:34 crc kubenswrapper[4888]: W1201 19:51:34.634338 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f7bb90_fce7_4f77_a9d0_eaa5089e4c6a.slice/crio-26b42ecf1e757466771ad9b17d813c2f73e0e2849aa0e90695b4ec226ab4afb7 WatchSource:0}: Error finding container 
26b42ecf1e757466771ad9b17d813c2f73e0e2849aa0e90695b4ec226ab4afb7: Status 404 returned error can't find the container with id 26b42ecf1e757466771ad9b17d813c2f73e0e2849aa0e90695b4ec226ab4afb7 Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.741654 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:51:34 crc kubenswrapper[4888]: W1201 19:51:34.774910 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9a19cb_f999_49ca_89f8_e5ab13e453a4.slice/crio-11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b WatchSource:0}: Error finding container 11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b: Status 404 returned error can't find the container with id 11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b Dec 01 19:51:34 crc kubenswrapper[4888]: I1201 19:51:34.861625 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:34 crc kubenswrapper[4888]: W1201 19:51:34.883023 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a86ecdc_69f3_43be_adae_8b8a94dd7f73.slice/crio-22574cc9d19f6ea6210356e364e6cbaf619760b17fd375ec46009428a8208913 WatchSource:0}: Error finding container 22574cc9d19f6ea6210356e364e6cbaf619760b17fd375ec46009428a8208913: Status 404 returned error can't find the container with id 22574cc9d19f6ea6210356e364e6cbaf619760b17fd375ec46009428a8208913 Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.058215 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8692b" event={"ID":"2ee1004c-9319-4275-9123-49e393196c06","Type":"ContainerStarted","Data":"5cc9313e30870523146105d1e43acd46a476c4390f8804e246937d98c1acc895"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.058309 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8692b" event={"ID":"2ee1004c-9319-4275-9123-49e393196c06","Type":"ContainerStarted","Data":"9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.069229 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m7g88" event={"ID":"b19201ab-fb2d-4011-aa2f-a078153687d1","Type":"ContainerStarted","Data":"503c0e5e504a0a5afdd90463196b3c8820f4b717bc53ecb3ecebc6692ecf33bd"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.069293 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m7g88" event={"ID":"b19201ab-fb2d-4011-aa2f-a078153687d1","Type":"ContainerStarted","Data":"86763ff365ccd297610ad21233a06e3075f9260b8117adf54b9585bf054c5ff3"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.089492 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerStarted","Data":"dbf6fc0958d95528a55edf2df8c8def0e9328adbadc5e96e16bbcb3cb235cdf5"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.090685 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerStarted","Data":"e1c7e993c0b297f9fc9735b13f03274dc72873246ef446a20717ead68795580e"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.098570 4888 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-8692b" podStartSLOduration=3.098546239 podStartE2EDuration="3.098546239s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:35.087108185 +0000 UTC m=+1094.958138099" watchObservedRunningTime="2025-12-01 19:51:35.098546239 +0000 UTC m=+1094.969576153" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.107279 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8dw7s" event={"ID":"c4b06642-351e-4bc5-b48e-ab8b6ddf750c","Type":"ContainerStarted","Data":"cda573797983fc2610e9ea6c782a6fe94ed8b6a0659f2bb18110416c8aab2dda"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.117254 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerStarted","Data":"22574cc9d19f6ea6210356e364e6cbaf619760b17fd375ec46009428a8208913"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.122084 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xk69h" event={"ID":"047835ab-4a66-4ff8-9252-c9c5ca0d0352","Type":"ContainerStarted","Data":"8afd10da97b986c38c5725978a02b5e46f5f6aaf0a2066797f35c557c3e342d5"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.123165 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-m7g88" podStartSLOduration=3.123143932 podStartE2EDuration="3.123143932s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:35.106387531 +0000 UTC m=+1094.977417455" watchObservedRunningTime="2025-12-01 19:51:35.123143932 +0000 UTC m=+1094.994173846" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.124558 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerStarted","Data":"11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.140496 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tnc9l" event={"ID":"e867ec9b-1972-4745-8dea-944cc62c6db5","Type":"ContainerStarted","Data":"c5b982a1403bc0c37df0593e682017ffa3e292c9fd8defb954050208b02d6b38"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.143876 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerStarted","Data":"26b42ecf1e757466771ad9b17d813c2f73e0e2849aa0e90695b4ec226ab4afb7"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.164232 4888 generic.go:334] "Generic (PLEG): container finished" podID="7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" containerID="ca4b01ee693d1217e600beacb1f36b60524923c9c2db02fa6dd547f2d4ad4c85" exitCode=0 Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.164289 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" event={"ID":"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb","Type":"ContainerDied","Data":"ca4b01ee693d1217e600beacb1f36b60524923c9c2db02fa6dd547f2d4ad4c85"} Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.524802 4888 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.565515 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.633213 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.634993 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.675013 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.687713 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.687780 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.687902 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.687945 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.688002 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pkj9\" (UniqueName: \"kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.724397 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.755344 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:35 crc kubenswrapper[4888]: W1201 19:51:35.783683 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8a43d3f_c717_4c67_90c1_b6ac3423cab2.slice/crio-6d2951b6a8d4e003ec1ef52967bd86ee1b0ba4c9c3fd3cad43c72029de9cf302 WatchSource:0}: Error finding container 6d2951b6a8d4e003ec1ef52967bd86ee1b0ba4c9c3fd3cad43c72029de9cf302: Status 404 returned error can't find the container with id 
6d2951b6a8d4e003ec1ef52967bd86ee1b0ba4c9c3fd3cad43c72029de9cf302 Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.790197 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.790285 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.790346 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pkj9\" (UniqueName: \"kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.790391 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.790409 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.792255 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.793471 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.793650 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.846057 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pkj9\" (UniqueName: \"kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.857716 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key\") pod \"horizon-6666f788cc-5g6pk\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:35 crc kubenswrapper[4888]: I1201 19:51:35.887184 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:51:36 crc kubenswrapper[4888]: E1201 19:51:36.034925 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.125076 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.131508 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.197856 4888 generic.go:334] "Generic (PLEG): container finished" podID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerID="dbf6fc0958d95528a55edf2df8c8def0e9328adbadc5e96e16bbcb3cb235cdf5" exitCode=0 Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.197971 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerDied","Data":"dbf6fc0958d95528a55edf2df8c8def0e9328adbadc5e96e16bbcb3cb235cdf5"} Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.198033 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerStarted","Data":"8acf1013fd4d03b9021d8bebebd562581de391dc1d0f51b4ed6dc95453ff340c"} Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.198064 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.208598 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerStarted","Data":"6d2951b6a8d4e003ec1ef52967bd86ee1b0ba4c9c3fd3cad43c72029de9cf302"} Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.215606 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" event={"ID":"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb","Type":"ContainerDied","Data":"2f50efb7cc6927fc35ca72d97f1aa81851b8dffaf70a575eb75716b8d10a9335"} Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.215656 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-p5g9j" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.215685 4888 scope.go:117] "RemoveContainer" containerID="ca4b01ee693d1217e600beacb1f36b60524923c9c2db02fa6dd547f2d4ad4c85" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.215628 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.216845 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w58ww\" (UniqueName: \"kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.217013 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.217495 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.217571 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.217659 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0\") pod \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\" (UID: \"7a9ffd6c-6443-4126-ae73-7edde4b9b3bb\") " Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.237989 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww" (OuterVolumeSpecName: "kube-api-access-w58ww") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "kube-api-access-w58ww". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.238101 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" podStartSLOduration=4.238072413 podStartE2EDuration="4.238072413s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:36.237124568 +0000 UTC m=+1096.108154502" watchObservedRunningTime="2025-12-01 19:51:36.238072413 +0000 UTC m=+1096.109102317" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.273237 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.278486 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config" (OuterVolumeSpecName: "config") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.283590 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.288327 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.311959 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" (UID: "7a9ffd6c-6443-4126-ae73-7edde4b9b3bb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335428 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335483 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335502 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335514 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335526 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w58ww\" (UniqueName: \"kubernetes.io/projected/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-kube-api-access-w58ww\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.335539 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.685137 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.693355 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-p5g9j"] Dec 01 19:51:36 crc kubenswrapper[4888]: I1201 19:51:36.865466 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:51:36 crc kubenswrapper[4888]: W1201 19:51:36.894915 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf738f310_2f55_4a77_b9e9_b654891b3ef0.slice/crio-11ac44600abd653e7fd8b69c21f0da29c57c01e64185316c12d99ee7c6e44b05 WatchSource:0}: Error finding container 11ac44600abd653e7fd8b69c21f0da29c57c01e64185316c12d99ee7c6e44b05: Status 404 returned error can't find the container with id 11ac44600abd653e7fd8b69c21f0da29c57c01e64185316c12d99ee7c6e44b05 Dec 01 19:51:37 crc kubenswrapper[4888]: I1201 19:51:37.261081 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerStarted","Data":"11ac44600abd653e7fd8b69c21f0da29c57c01e64185316c12d99ee7c6e44b05"} Dec 01 19:51:37 crc kubenswrapper[4888]: I1201 19:51:37.285412 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerStarted","Data":"847f7eecf87676fb7155fbb0e15c20567c382be08c3e7c20f1e8643b8cf9869f"} Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.301941 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerStarted","Data":"9d8034027a168af8bbcc6cb8391cc2357a9ce9f5d89c41dc174038b937fb752a"} Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.324187 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerStarted","Data":"8aa8729b33c267201ccfc1fcc6da139ec6af90c06d3c56138506ad237a9fb894"} Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.324448 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-log" containerID="cri-o://847f7eecf87676fb7155fbb0e15c20567c382be08c3e7c20f1e8643b8cf9869f" gracePeriod=30 Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.325115 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-httpd" containerID="cri-o://8aa8729b33c267201ccfc1fcc6da139ec6af90c06d3c56138506ad237a9fb894" gracePeriod=30 Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.366828 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.36678742 podStartE2EDuration="6.36678742s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:38.360049887 +0000 UTC m=+1098.231079801" watchObservedRunningTime="2025-12-01 19:51:38.36678742 +0000 UTC m=+1098.237817334" Dec 01 19:51:38 crc kubenswrapper[4888]: I1201 19:51:38.485625 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" path="/var/lib/kubelet/pods/7a9ffd6c-6443-4126-ae73-7edde4b9b3bb/volumes" Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.483799 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerStarted","Data":"37deb783c0ee4794d0221ba9115b450c7df4dd144efbc9f4e6474bd31fbb2c22"} Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.484809 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-log" containerID="cri-o://9d8034027a168af8bbcc6cb8391cc2357a9ce9f5d89c41dc174038b937fb752a" gracePeriod=30 Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.485015 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-httpd" containerID="cri-o://37deb783c0ee4794d0221ba9115b450c7df4dd144efbc9f4e6474bd31fbb2c22" gracePeriod=30 Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.501866 4888 generic.go:334] "Generic (PLEG): container finished" podID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerID="8aa8729b33c267201ccfc1fcc6da139ec6af90c06d3c56138506ad237a9fb894" exitCode=0 Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.502282 4888 generic.go:334] "Generic (PLEG): container finished" podID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerID="847f7eecf87676fb7155fbb0e15c20567c382be08c3e7c20f1e8643b8cf9869f" 
exitCode=143 Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.502428 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerDied","Data":"8aa8729b33c267201ccfc1fcc6da139ec6af90c06d3c56138506ad237a9fb894"} Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.502498 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerDied","Data":"847f7eecf87676fb7155fbb0e15c20567c382be08c3e7c20f1e8643b8cf9869f"} Dec 01 19:51:39 crc kubenswrapper[4888]: I1201 19:51:39.664704 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.664621618 podStartE2EDuration="7.664621618s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:51:39.63790593 +0000 UTC m=+1099.508935844" watchObservedRunningTime="2025-12-01 19:51:39.664621618 +0000 UTC m=+1099.535651542" Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.529745 4888 generic.go:334] "Generic (PLEG): container finished" podID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerID="37deb783c0ee4794d0221ba9115b450c7df4dd144efbc9f4e6474bd31fbb2c22" exitCode=0 Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.530291 4888 generic.go:334] "Generic (PLEG): container finished" podID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerID="9d8034027a168af8bbcc6cb8391cc2357a9ce9f5d89c41dc174038b937fb752a" exitCode=143 Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.529786 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerDied","Data":"37deb783c0ee4794d0221ba9115b450c7df4dd144efbc9f4e6474bd31fbb2c22"} Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.530448 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerDied","Data":"9d8034027a168af8bbcc6cb8391cc2357a9ce9f5d89c41dc174038b937fb752a"} Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.541316 4888 generic.go:334] "Generic (PLEG): container finished" podID="2ee1004c-9319-4275-9123-49e393196c06" containerID="5cc9313e30870523146105d1e43acd46a476c4390f8804e246937d98c1acc895" exitCode=0 Dec 01 19:51:40 crc kubenswrapper[4888]: I1201 19:51:40.541378 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8692b" event={"ID":"2ee1004c-9319-4275-9123-49e393196c06","Type":"ContainerDied","Data":"5cc9313e30870523146105d1e43acd46a476c4390f8804e246937d98c1acc895"} Dec 01 19:51:43 crc kubenswrapper[4888]: I1201 19:51:43.213655 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:51:43 crc kubenswrapper[4888]: I1201 19:51:43.295641 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:51:43 crc kubenswrapper[4888]: I1201 19:51:43.295940 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" 
containerID="cri-o://2c5380c2edff747804a73caa67786e13477916226ba82d01b99d7a8beed8f670" gracePeriod=10 Dec 01 19:51:43 crc kubenswrapper[4888]: I1201 19:51:43.599170 4888 generic.go:334] "Generic (PLEG): container finished" podID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerID="2c5380c2edff747804a73caa67786e13477916226ba82d01b99d7a8beed8f670" exitCode=0 Dec 01 19:51:43 crc kubenswrapper[4888]: I1201 19:51:43.599607 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" event={"ID":"605da06d-22a2-423c-8acd-7fd22fcbc0b8","Type":"ContainerDied","Data":"2c5380c2edff747804a73caa67786e13477916226ba82d01b99d7a8beed8f670"} Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.064874 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.133094 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"] Dec 01 19:51:44 crc kubenswrapper[4888]: E1201 19:51:44.133616 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" containerName="init" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.133635 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" containerName="init" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.133858 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a9ffd6c-6443-4126-ae73-7edde4b9b3bb" containerName="init" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.135048 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.140535 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.158630 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"] Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.228502 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.261216 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d8bccccd8-fw8bk"] Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265025 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzzh8\" (UniqueName: \"kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265087 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265170 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " 
pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265299 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265372 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265404 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.265492 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.271128 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.293367 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d8bccccd8-fw8bk"] Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.367078 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-scripts\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.367215 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-config-data\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.367274 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.367300 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.367326 4888 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-tls-certs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.368736 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.368819 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.368886 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5006252a-8f29-475c-9847-e2d6662ff13b-logs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.368920 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369175 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369214 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-secret-key\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369437 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-combined-ca-bundle\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369657 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369749 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-xl27x\" (UniqueName: \"kubernetes.io/projected/5006252a-8f29-475c-9847-e2d6662ff13b-kube-api-access-xl27x\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369822 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzzh8\" (UniqueName: \"kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.369864 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.370750 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.376423 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.376772 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.413386 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.431816 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzzh8\" (UniqueName: \"kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8\") pod \"horizon-856c6474d8-q6nhf\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") " pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.473787 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-combined-ca-bundle\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.473907 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl27x\" (UniqueName: 
\"kubernetes.io/projected/5006252a-8f29-475c-9847-e2d6662ff13b-kube-api-access-xl27x\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.473973 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-scripts\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.474001 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-config-data\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.474047 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-tls-certs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.474073 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5006252a-8f29-475c-9847-e2d6662ff13b-logs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.474102 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-secret-key\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.475954 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-scripts\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.484666 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5006252a-8f29-475c-9847-e2d6662ff13b-logs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.485766 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5006252a-8f29-475c-9847-e2d6662ff13b-config-data\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.497371 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-combined-ca-bundle\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc 
kubenswrapper[4888]: I1201 19:51:44.498860 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-tls-certs\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.505772 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5006252a-8f29-475c-9847-e2d6662ff13b-horizon-secret-key\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.506906 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.567569 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl27x\" (UniqueName: \"kubernetes.io/projected/5006252a-8f29-475c-9847-e2d6662ff13b-kube-api-access-xl27x\") pod \"horizon-d8bccccd8-fw8bk\" (UID: \"5006252a-8f29-475c-9847-e2d6662ff13b\") " pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:44 crc kubenswrapper[4888]: I1201 19:51:44.605121 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:51:45 crc kubenswrapper[4888]: I1201 19:51:45.202512 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Dec 01 19:51:46 crc kubenswrapper[4888]: E1201 19:51:46.333984 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]" Dec 01 19:51:49 crc kubenswrapper[4888]: I1201 19:51:49.890169 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007283 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007579 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007706 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007773 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007809 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.007842 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hg22\" (UniqueName: \"kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22\") pod \"2ee1004c-9319-4275-9123-49e393196c06\" (UID: \"2ee1004c-9319-4275-9123-49e393196c06\") " Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.015089 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts" (OuterVolumeSpecName: "scripts") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.015968 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22" (OuterVolumeSpecName: "kube-api-access-2hg22") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "kube-api-access-2hg22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.016031 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.016435 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.035909 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.037344 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.037404 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.044372 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data" (OuterVolumeSpecName: "config-data") pod "2ee1004c-9319-4275-9123-49e393196c06" (UID: "2ee1004c-9319-4275-9123-49e393196c06"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110591 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110630 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110640 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hg22\" (UniqueName: \"kubernetes.io/projected/2ee1004c-9319-4275-9123-49e393196c06-kube-api-access-2hg22\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110652 4888 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110660 4888 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.110669 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ee1004c-9319-4275-9123-49e393196c06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.202754 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: connect: connection refused" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.702603 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8692b" event={"ID":"2ee1004c-9319-4275-9123-49e393196c06","Type":"ContainerDied","Data":"9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1"} Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.702646 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9cd2adb4c0abc98cec340288f131e3705e2a6d61578233df20649c62c08f4af1" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.702699 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8692b" Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.979526 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-8692b"] Dec 01 19:51:50 crc kubenswrapper[4888]: I1201 19:51:50.988745 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-8692b"] Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.080938 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-54q8p"] Dec 01 19:51:51 crc kubenswrapper[4888]: E1201 19:51:51.081959 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ee1004c-9319-4275-9123-49e393196c06" containerName="keystone-bootstrap" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.082052 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ee1004c-9319-4275-9123-49e393196c06" containerName="keystone-bootstrap" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.082477 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ee1004c-9319-4275-9123-49e393196c06" containerName="keystone-bootstrap" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.083608 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.088087 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.088621 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.088939 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-skcf5" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.090011 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.090698 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.097776 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-54q8p"] Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.134929 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.135015 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.135067 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.135119 4888 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.135141 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.135169 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6c67\" (UniqueName: \"kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236539 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236626 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236646 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236688 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6c67\" (UniqueName: \"kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236738 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.236801 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.243726 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.244516 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.247170 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.247694 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.251136 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.254592 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6c67\" (UniqueName: \"kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67\") pod \"keystone-bootstrap-54q8p\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") " pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:51 crc kubenswrapper[4888]: I1201 19:51:51.408393 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54q8p" Dec 01 19:51:52 crc kubenswrapper[4888]: I1201 19:51:52.464137 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ee1004c-9319-4275-9123-49e393196c06" path="/var/lib/kubelet/pods/2ee1004c-9319-4275-9123-49e393196c06/volumes" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.488332 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.496927 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580760 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580845 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580864 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580920 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580952 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.580967 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584197 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584387 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584449 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584485 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc 
kubenswrapper[4888]: I1201 19:51:53.584519 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584534 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs\") pod \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\" (UID: \"a8a43d3f-c717-4c67-90c1-b6ac3423cab2\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584558 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.584603 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdg2f\" (UniqueName: \"kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f\") pod \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\" (UID: \"5a86ecdc-69f3-43be-adae-8b8a94dd7f73\") " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.585626 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.586018 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.586393 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs" (OuterVolumeSpecName: "logs") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.589353 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs" (OuterVolumeSpecName: "logs") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.590482 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.592883 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.594253 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f" (OuterVolumeSpecName: "kube-api-access-gdg2f") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "kube-api-access-gdg2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.594514 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts" (OuterVolumeSpecName: "scripts") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.594781 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts" (OuterVolumeSpecName: "scripts") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.607447 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx" (OuterVolumeSpecName: "kube-api-access-78lmx") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "kube-api-access-78lmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.622890 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.625203 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.647599 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data" (OuterVolumeSpecName: "config-data") pod "5a86ecdc-69f3-43be-adae-8b8a94dd7f73" (UID: "5a86ecdc-69f3-43be-adae-8b8a94dd7f73"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.648290 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data" (OuterVolumeSpecName: "config-data") pod "a8a43d3f-c717-4c67-90c1-b6ac3423cab2" (UID: "a8a43d3f-c717-4c67-90c1-b6ac3423cab2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687391 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdg2f\" (UniqueName: \"kubernetes.io/projected/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-kube-api-access-gdg2f\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687422 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687433 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687472 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687481 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687490 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-kube-api-access-78lmx\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687498 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687507 4888 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687515 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687526 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687534 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a86ecdc-69f3-43be-adae-8b8a94dd7f73-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687542 4888 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687550 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8a43d3f-c717-4c67-90c1-b6ac3423cab2-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.687562 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.709489 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.710812 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.729452 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a8a43d3f-c717-4c67-90c1-b6ac3423cab2","Type":"ContainerDied","Data":"6d2951b6a8d4e003ec1ef52967bd86ee1b0ba4c9c3fd3cad43c72029de9cf302"} Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.729515 4888 scope.go:117] "RemoveContainer" containerID="37deb783c0ee4794d0221ba9115b450c7df4dd144efbc9f4e6474bd31fbb2c22" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.729685 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.743425 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5a86ecdc-69f3-43be-adae-8b8a94dd7f73","Type":"ContainerDied","Data":"22574cc9d19f6ea6210356e364e6cbaf619760b17fd375ec46009428a8208913"} Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.743542 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.776008 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.788588 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.790629 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.790668 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.821419 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.835087 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.842833 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:51:53 crc kubenswrapper[4888]: E1201 19:51:53.843587 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-httpd" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.843620 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-httpd" Dec 01 19:51:53 crc kubenswrapper[4888]: E1201 19:51:53.843638 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-log" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.843648 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-log" Dec 01 19:51:53 crc kubenswrapper[4888]: E1201 19:51:53.843665 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-httpd" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.843676 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-httpd" Dec 01 19:51:53 crc kubenswrapper[4888]: E1201 19:51:53.843690 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-log" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.843698 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-log" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.844057 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-log" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.844102 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" containerName="glance-httpd" Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.844121 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" containerName="glance-httpd" Dec 01 19:51:53 crc 
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.845706 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.851398 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.851465 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.851705 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jllkx"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.851733 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.856378 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.867762 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.871841 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.876064 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.876451 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.881792 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994153 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994221 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994254 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994275 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994665 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt7bw\" (UniqueName: \"kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994723 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994785 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994931 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994957 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.994980 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995035 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995073 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995149 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995177 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995475 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:53 crc kubenswrapper[4888]: I1201 19:51:53.995740 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6x9j\" (UniqueName: \"kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097543 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt7bw\" (UniqueName: \"kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097598 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097645 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097712 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097741 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097761 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097784 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097812 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097850 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097873 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097892 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097923 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6x9j\" (UniqueName: \"kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097970 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.097990 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.098011 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.098030 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.098116 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.098909 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.098992 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.099066 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.099348 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.099578 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.108494 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0"
Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.111160 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID:
\"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.113217 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.114877 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.115830 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.116759 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.118646 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.118661 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt7bw\" (UniqueName: \"kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.118836 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6x9j\" (UniqueName: \"kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.125912 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.148825 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc 
kubenswrapper[4888]: I1201 19:51:54.153596 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.183716 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.202282 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.465051 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a86ecdc-69f3-43be-adae-8b8a94dd7f73" path="/var/lib/kubelet/pods/5a86ecdc-69f3-43be-adae-8b8a94dd7f73/volumes" Dec 01 19:51:54 crc kubenswrapper[4888]: I1201 19:51:54.466941 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8a43d3f-c717-4c67-90c1-b6ac3423cab2" path="/var/lib/kubelet/pods/a8a43d3f-c717-4c67-90c1-b6ac3423cab2/volumes" Dec 01 19:51:56 crc kubenswrapper[4888]: E1201 19:51:56.564323 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]" Dec 01 19:51:57 crc kubenswrapper[4888]: I1201 19:51:57.790033 4888 generic.go:334] "Generic (PLEG): container finished" podID="b19201ab-fb2d-4011-aa2f-a078153687d1" containerID="503c0e5e504a0a5afdd90463196b3c8820f4b717bc53ecb3ecebc6692ecf33bd" exitCode=0 Dec 01 19:51:57 crc kubenswrapper[4888]: I1201 19:51:57.790076 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m7g88" event={"ID":"b19201ab-fb2d-4011-aa2f-a078153687d1","Type":"ContainerDied","Data":"503c0e5e504a0a5afdd90463196b3c8820f4b717bc53ecb3ecebc6692ecf33bd"} Dec 01 19:52:00 crc kubenswrapper[4888]: I1201 19:52:00.202443 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout" Dec 01 19:52:00 crc kubenswrapper[4888]: I1201 19:52:00.203226 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:52:01 crc kubenswrapper[4888]: E1201 19:52:01.576350 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 01 19:52:01 crc kubenswrapper[4888]: E1201 19:52:01.576527 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n666h575h696h59dh65bh596h545h686h57dh594h96hd8hbfh4h699h548h574h59bhc8h55ch585h647h594h5cfh5f6h65bh64h657h595hcdh5c4hdfq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hbkzn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(ce9a19cb-f999-49ca-89f8-e5ab13e453a4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:52:02 crc kubenswrapper[4888]: E1201 19:52:02.127637 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 01 19:52:02 crc kubenswrapper[4888]: E1201 19:52:02.128002 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-btz6w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-xk69h_openstack(047835ab-4a66-4ff8-9252-c9c5ca0d0352): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:52:02 crc kubenswrapper[4888]: E1201 19:52:02.129415 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-xk69h" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.246767 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.285802 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-m7g88" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.360958 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle\") pod \"b19201ab-fb2d-4011-aa2f-a078153687d1\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361078 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgccl\" (UniqueName: \"kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl\") pod \"b19201ab-fb2d-4011-aa2f-a078153687d1\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361119 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdc22\" (UniqueName: \"kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361138 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361174 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361222 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config\") pod \"b19201ab-fb2d-4011-aa2f-a078153687d1\" (UID: \"b19201ab-fb2d-4011-aa2f-a078153687d1\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361251 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361320 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.361357 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb\") pod \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\" (UID: \"605da06d-22a2-423c-8acd-7fd22fcbc0b8\") " Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.366392 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22" (OuterVolumeSpecName: "kube-api-access-tdc22") pod 
"605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "kube-api-access-tdc22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.367304 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl" (OuterVolumeSpecName: "kube-api-access-fgccl") pod "b19201ab-fb2d-4011-aa2f-a078153687d1" (UID: "b19201ab-fb2d-4011-aa2f-a078153687d1"). InnerVolumeSpecName "kube-api-access-fgccl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.401332 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b19201ab-fb2d-4011-aa2f-a078153687d1" (UID: "b19201ab-fb2d-4011-aa2f-a078153687d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.411359 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config" (OuterVolumeSpecName: "config") pod "b19201ab-fb2d-4011-aa2f-a078153687d1" (UID: "b19201ab-fb2d-4011-aa2f-a078153687d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.418641 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config" (OuterVolumeSpecName: "config") pod "605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.420573 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.421644 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.431212 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.435838 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "605da06d-22a2-423c-8acd-7fd22fcbc0b8" (UID: "605da06d-22a2-423c-8acd-7fd22fcbc0b8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464101 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgccl\" (UniqueName: \"kubernetes.io/projected/b19201ab-fb2d-4011-aa2f-a078153687d1-kube-api-access-fgccl\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464491 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464582 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdc22\" (UniqueName: \"kubernetes.io/projected/605da06d-22a2-423c-8acd-7fd22fcbc0b8-kube-api-access-tdc22\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464745 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464822 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464896 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.464968 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.465039 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/605da06d-22a2-423c-8acd-7fd22fcbc0b8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.465111 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b19201ab-fb2d-4011-aa2f-a078153687d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.851474 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.851468 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" event={"ID":"605da06d-22a2-423c-8acd-7fd22fcbc0b8","Type":"ContainerDied","Data":"863b1f3f1627a2e6389b26e0940c36d9045f396bd268304c24197db52a8aeb81"} Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.859987 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-m7g88" event={"ID":"b19201ab-fb2d-4011-aa2f-a078153687d1","Type":"ContainerDied","Data":"86763ff365ccd297610ad21233a06e3075f9260b8117adf54b9585bf054c5ff3"} Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.860219 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86763ff365ccd297610ad21233a06e3075f9260b8117adf54b9585bf054c5ff3" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.860021 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-m7g88" Dec 01 19:52:02 crc kubenswrapper[4888]: E1201 19:52:02.863597 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-xk69h" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.905101 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:52:02 crc kubenswrapper[4888]: I1201 19:52:02.912297 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-n5tgx"] Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.559690 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.560500 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.560560 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.560623 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="init" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.560675 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="init" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.560728 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b19201ab-fb2d-4011-aa2f-a078153687d1" containerName="neutron-db-sync" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.560772 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b19201ab-fb2d-4011-aa2f-a078153687d1" containerName="neutron-db-sync" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.560984 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.561047 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b19201ab-fb2d-4011-aa2f-a078153687d1" containerName="neutron-db-sync" Dec 01 19:52:03 
crc kubenswrapper[4888]: I1201 19:52:03.562063 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.593850 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.594707 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pbf64,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-tnc9l_openstack(e867ec9b-1972-4745-8dea-944cc62c6db5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.604370 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-tnc9l" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.622532 4888 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.701937 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj2jz\" (UniqueName: \"kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.702002 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.702022 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.702053 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.702084 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.702142 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.715287 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7894c9f948-wgblk"] Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.716354 4888 scope.go:117] "RemoveContainer" containerID="9d8034027a168af8bbcc6cb8391cc2357a9ce9f5d89c41dc174038b937fb752a" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.718329 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.720808 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.720983 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-ggfk2" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.721130 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.731946 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7894c9f948-wgblk"] Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.743837 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803521 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803859 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803896 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj2jz\" (UniqueName: \"kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803916 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803941 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlll8\" (UniqueName: \"kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.803994 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.804030 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: 
\"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.804066 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.804106 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.804134 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.804174 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.805559 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.806480 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.807105 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.807770 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.810809 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 
19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.852706 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj2jz\" (UniqueName: \"kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz\") pod \"dnsmasq-dns-55f844cf75-c88kt\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: E1201 19:52:03.886401 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-tnc9l" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.907496 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.907563 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.907613 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.907645 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.907663 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlll8\" (UniqueName: \"kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.924972 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.928382 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.928582 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hlll8\" (UniqueName: \"kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.929752 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.931777 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs\") pod \"neutron-7894c9f948-wgblk\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.973282 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:03 crc kubenswrapper[4888]: I1201 19:52:03.999620 4888 scope.go:117] "RemoveContainer" containerID="8aa8729b33c267201ccfc1fcc6da139ec6af90c06d3c56138506ad237a9fb894" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.056521 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.122814 4888 scope.go:117] "RemoveContainer" containerID="847f7eecf87676fb7155fbb0e15c20567c382be08c3e7c20f1e8643b8cf9869f" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.221826 4888 scope.go:117] "RemoveContainer" containerID="2c5380c2edff747804a73caa67786e13477916226ba82d01b99d7a8beed8f670" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.272637 4888 scope.go:117] "RemoveContainer" containerID="bfdbe00013de156dba963ac18f2d58e39f5c3f2308f2e8d00b43f8606980ebbd" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.473091 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" path="/var/lib/kubelet/pods/605da06d-22a2-423c-8acd-7fd22fcbc0b8/volumes" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.480436 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d8bccccd8-fw8bk"] Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.526265 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-54q8p"] Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.675269 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.745967 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"] Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.844265 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.899991 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerStarted","Data":"e2d5f83f4c79d040e322b79e2b621d742fd97d1f3b24ab9d9ccd8bdb938d277f"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.921562 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-sync-8dw7s" event={"ID":"c4b06642-351e-4bc5-b48e-ab8b6ddf750c","Type":"ContainerStarted","Data":"a47bfb9686a24a8042cb92fc6e06f13dc106b026bf9c2dbc88683399fe34a64b"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.942346 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerStarted","Data":"4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.946141 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerStarted","Data":"f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.948185 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-8dw7s" podStartSLOduration=5.557290059 podStartE2EDuration="32.948160815s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="2025-12-01 19:51:34.1966352 +0000 UTC m=+1094.067665114" lastFinishedPulling="2025-12-01 19:52:01.587505956 +0000 UTC m=+1121.458535870" observedRunningTime="2025-12-01 19:52:04.939626196 +0000 UTC m=+1124.810656120" watchObservedRunningTime="2025-12-01 19:52:04.948160815 +0000 UTC m=+1124.819190729" Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.970219 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d8bccccd8-fw8bk" event={"ID":"5006252a-8f29-475c-9847-e2d6662ff13b","Type":"ContainerStarted","Data":"6981b2a0e8fe0fba2ef8ce1542570b26839e7dc71151a18bb66e94eeb4ea4bf5"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.975601 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerStarted","Data":"f5cafde9ab32748bb28aacfc8a89d8726a418eced2306e2e75a8bf901e49ca45"} Dec 01 19:52:04 crc kubenswrapper[4888]: I1201 19:52:04.990777 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerStarted","Data":"7fbd2b14126e4e8b06093d69d7f98821adf50fdd9d56122d6a6c49897f334b46"} Dec 01 19:52:05 crc kubenswrapper[4888]: I1201 19:52:04.995841 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerStarted","Data":"c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9"} Dec 01 19:52:05 crc kubenswrapper[4888]: I1201 19:52:05.025963 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54q8p" event={"ID":"a9337539-212d-4ad9-9572-80712d40784d","Type":"ContainerStarted","Data":"544cddc5f5072475c64caadac2c73e6cd911835214bb4fb1945c77629948d28b"} Dec 01 19:52:05 crc kubenswrapper[4888]: I1201 19:52:05.051478 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:05 crc kubenswrapper[4888]: W1201 19:52:05.088880 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf3749b5_b4ea_4296_8cde_88fcf8011abe.slice/crio-430177977dbe6876fedaddca1e76a1d8be2e725273a162b8beb821a07c118168 WatchSource:0}: Error finding container 
430177977dbe6876fedaddca1e76a1d8be2e725273a162b8beb821a07c118168: Status 404 returned error can't find the container with id 430177977dbe6876fedaddca1e76a1d8be2e725273a162b8beb821a07c118168
Dec 01 19:52:05 crc kubenswrapper[4888]: I1201 19:52:05.111329 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7894c9f948-wgblk"]
Dec 01 19:52:05 crc kubenswrapper[4888]: I1201 19:52:05.205740 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-n5tgx" podUID="605da06d-22a2-423c-8acd-7fd22fcbc0b8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.125:5353: i/o timeout"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.081612 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerStarted","Data":"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.093883 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerStarted","Data":"31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.094057 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7f76bc979c-6tnbl" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon-log" containerID="cri-o://c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.094639 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7f76bc979c-6tnbl" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon" containerID="cri-o://31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.132786 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d8bccccd8-fw8bk" event={"ID":"5006252a-8f29-475c-9847-e2d6662ff13b","Type":"ContainerStarted","Data":"1d446f66135b8c3a79cbbaca6521d645fa25fbed73b7770f69d899b0d1a355e3"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.136379 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerStarted","Data":"ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.136367 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7f76bc979c-6tnbl" podStartSLOduration=5.033732726 podStartE2EDuration="34.13634613s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="2025-12-01 19:51:34.663628187 +0000 UTC m=+1094.534658101" lastFinishedPulling="2025-12-01 19:52:03.766241591 +0000 UTC m=+1123.637271505" observedRunningTime="2025-12-01 19:52:06.130666384 +0000 UTC m=+1126.001696308" watchObservedRunningTime="2025-12-01 19:52:06.13634613 +0000 UTC m=+1126.007376034"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.136496 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d745c59d5-8h7kr" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon-log" containerID="cri-o://4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.136875 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7d745c59d5-8h7kr" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon" containerID="cri-o://ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.140583 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" event={"ID":"df3749b5-b4ea-4296-8cde-88fcf8011abe","Type":"ContainerStarted","Data":"430177977dbe6876fedaddca1e76a1d8be2e725273a162b8beb821a07c118168"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.147608 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerStarted","Data":"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.157245 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerStarted","Data":"f515790c70d1eead67fe9252c7b6223092857b895f50d02c76a3c3fc28b6092c"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.165154 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7d745c59d5-8h7kr" podStartSLOduration=5.483047449 podStartE2EDuration="34.165136461s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="2025-12-01 19:51:33.434236052 +0000 UTC m=+1093.305265966" lastFinishedPulling="2025-12-01 19:52:02.116325064 +0000 UTC m=+1121.987354978" observedRunningTime="2025-12-01 19:52:06.161921858 +0000 UTC m=+1126.032951772" watchObservedRunningTime="2025-12-01 19:52:06.165136461 +0000 UTC m=+1126.036166375"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.180910 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerStarted","Data":"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.200827 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerStarted","Data":"c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.200981 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6666f788cc-5g6pk" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon-log" containerID="cri-o://f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.200996 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6666f788cc-5g6pk" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon" containerID="cri-o://c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7" gracePeriod=30
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.215894 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54q8p" event={"ID":"a9337539-212d-4ad9-9572-80712d40784d","Type":"ContainerStarted","Data":"b81cebe4ed41c65090024b191d59b7d1e00a2e5623fc1fbad0d03089f71776bf"}
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.265154 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-58df6cb45-qjhmp"]
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.267000 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.274084 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.276198 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.276346 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6666f788cc-5g6pk" podStartSLOduration=4.452919079 podStartE2EDuration="31.276328822s" podCreationTimestamp="2025-12-01 19:51:35 +0000 UTC" firstStartedPulling="2025-12-01 19:51:36.925490632 +0000 UTC m=+1096.796520546" lastFinishedPulling="2025-12-01 19:52:03.748900375 +0000 UTC m=+1123.619930289" observedRunningTime="2025-12-01 19:52:06.237984906 +0000 UTC m=+1126.109014820" watchObservedRunningTime="2025-12-01 19:52:06.276328822 +0000 UTC m=+1126.147358736"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.299522 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-58df6cb45-qjhmp"]
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.300315 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-54q8p" podStartSLOduration=15.300296369 podStartE2EDuration="15.300296369s" podCreationTimestamp="2025-12-01 19:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:06.274896855 +0000 UTC m=+1126.145926769" watchObservedRunningTime="2025-12-01 19:52:06.300296369 +0000 UTC m=+1126.171326283"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.396371 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-httpd-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.396772 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p29z4\" (UniqueName: \"kubernetes.io/projected/a4b29995-f291-4e12-bfb1-fad0318b0416-kube-api-access-p29z4\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.396846 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-combined-ca-bundle\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.396884 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.396912 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-internal-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.397035 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-ovndb-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.397126 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-public-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498596 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-httpd-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498650 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p29z4\" (UniqueName: \"kubernetes.io/projected/a4b29995-f291-4e12-bfb1-fad0318b0416-kube-api-access-p29z4\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498702 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-combined-ca-bundle\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498739 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498762 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-internal-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498832 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-ovndb-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.498864 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-public-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.506850 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-httpd-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.507903 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-combined-ca-bundle\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.508309 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-ovndb-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.508958 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-internal-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.510131 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-config\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.510310 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4b29995-f291-4e12-bfb1-fad0318b0416-public-tls-certs\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.521072 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p29z4\" (UniqueName: \"kubernetes.io/projected/a4b29995-f291-4e12-bfb1-fad0318b0416-kube-api-access-p29z4\") pod \"neutron-58df6cb45-qjhmp\" (UID: \"a4b29995-f291-4e12-bfb1-fad0318b0416\") " pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: I1201 19:52:06.602010 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:06 crc kubenswrapper[4888]: E1201 19:52:06.905470 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]"
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.223377 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerStarted","Data":"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.225002 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerStarted","Data":"0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.227134 4888 generic.go:334] "Generic (PLEG): container finished" podID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerID="5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d" exitCode=0
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.227210 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" event={"ID":"df3749b5-b4ea-4296-8cde-88fcf8011abe","Type":"ContainerDied","Data":"5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.229264 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerStarted","Data":"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.231544 4888 generic.go:334] "Generic (PLEG): container finished" podID="c4b06642-351e-4bc5-b48e-ab8b6ddf750c" containerID="a47bfb9686a24a8042cb92fc6e06f13dc106b026bf9c2dbc88683399fe34a64b" exitCode=0
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.231602 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8dw7s" event={"ID":"c4b06642-351e-4bc5-b48e-ab8b6ddf750c","Type":"ContainerDied","Data":"a47bfb9686a24a8042cb92fc6e06f13dc106b026bf9c2dbc88683399fe34a64b"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.248711 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerStarted","Data":"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.248772 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerStarted","Data":"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.249684 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7894c9f948-wgblk"
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.266226 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-856c6474d8-q6nhf" podStartSLOduration=23.266186774 podStartE2EDuration="23.266186774s" podCreationTimestamp="2025-12-01 19:51:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:07.260600631 +0000 UTC m=+1127.131630545" watchObservedRunningTime="2025-12-01 19:52:07.266186774 +0000 UTC m=+1127.137216688"
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.290852 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d8bccccd8-fw8bk" event={"ID":"5006252a-8f29-475c-9847-e2d6662ff13b","Type":"ContainerStarted","Data":"bf7af7c698be94c76237b412450cce411f97429d71403222d8f19ab59e280782"}
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.373638 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7894c9f948-wgblk" podStartSLOduration=4.373617179 podStartE2EDuration="4.373617179s" podCreationTimestamp="2025-12-01 19:52:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:07.359390203 +0000 UTC m=+1127.230420117" watchObservedRunningTime="2025-12-01 19:52:07.373617179 +0000 UTC m=+1127.244647093"
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.414668 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=14.414643285 podStartE2EDuration="14.414643285s" podCreationTimestamp="2025-12-01 19:51:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:07.393753907 +0000 UTC m=+1127.264783831" watchObservedRunningTime="2025-12-01 19:52:07.414643285 +0000 UTC m=+1127.285673199"
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.446603 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-58df6cb45-qjhmp"]
Dec 01 19:52:07 crc kubenswrapper[4888]: I1201 19:52:07.459551 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-d8bccccd8-fw8bk" podStartSLOduration=23.45953039 podStartE2EDuration="23.45953039s" podCreationTimestamp="2025-12-01 19:51:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:07.44749446 +0000 UTC m=+1127.318524374" watchObservedRunningTime="2025-12-01 19:52:07.45953039 +0000 UTC m=+1127.330560304"
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.299654 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-58df6cb45-qjhmp" event={"ID":"a4b29995-f291-4e12-bfb1-fad0318b0416","Type":"ContainerStarted","Data":"5d3c97ef7d037c32a671b0cec263ae61bba0e110eb5647e0af18206ebb2d41e9"}
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.301671 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerStarted","Data":"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b"}
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.329101 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=15.329082636 podStartE2EDuration="15.329082636s" podCreationTimestamp="2025-12-01 19:51:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:08.32497405 +0000 UTC m=+1128.196003964" watchObservedRunningTime="2025-12-01 19:52:08.329082636 +0000 UTC m=+1128.200112550"
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.825540 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8dw7s"
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.984233 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs\") pod \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") "
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.984609 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data\") pod \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") "
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.984662 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle\") pod \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") "
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.984768 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph9l2\" (UniqueName: \"kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2\") pod \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") "
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.984796 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts\") pod \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\" (UID: \"c4b06642-351e-4bc5-b48e-ab8b6ddf750c\") "
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.985505 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs" (OuterVolumeSpecName: "logs") pod "c4b06642-351e-4bc5-b48e-ab8b6ddf750c" (UID: "c4b06642-351e-4bc5-b48e-ab8b6ddf750c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:52:08 crc kubenswrapper[4888]: I1201 19:52:08.985949 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-logs\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.010089 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2" (OuterVolumeSpecName: "kube-api-access-ph9l2") pod "c4b06642-351e-4bc5-b48e-ab8b6ddf750c" (UID: "c4b06642-351e-4bc5-b48e-ab8b6ddf750c"). InnerVolumeSpecName "kube-api-access-ph9l2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.012806 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts" (OuterVolumeSpecName: "scripts") pod "c4b06642-351e-4bc5-b48e-ab8b6ddf750c" (UID: "c4b06642-351e-4bc5-b48e-ab8b6ddf750c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.031580 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data" (OuterVolumeSpecName: "config-data") pod "c4b06642-351e-4bc5-b48e-ab8b6ddf750c" (UID: "c4b06642-351e-4bc5-b48e-ab8b6ddf750c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.055621 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4b06642-351e-4bc5-b48e-ab8b6ddf750c" (UID: "c4b06642-351e-4bc5-b48e-ab8b6ddf750c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.088078 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.088117 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.088130 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph9l2\" (UniqueName: \"kubernetes.io/projected/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-kube-api-access-ph9l2\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.088139 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4b06642-351e-4bc5-b48e-ab8b6ddf750c-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.312119 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-8dw7s" event={"ID":"c4b06642-351e-4bc5-b48e-ab8b6ddf750c","Type":"ContainerDied","Data":"cda573797983fc2610e9ea6c782a6fe94ed8b6a0659f2bb18110416c8aab2dda"}
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.312208 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cda573797983fc2610e9ea6c782a6fe94ed8b6a0659f2bb18110416c8aab2dda"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.312135 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-8dw7s"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.376606 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6498684f7d-hltrn"]
Dec 01 19:52:09 crc kubenswrapper[4888]: E1201 19:52:09.377018 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4b06642-351e-4bc5-b48e-ab8b6ddf750c" containerName="placement-db-sync"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.377034 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4b06642-351e-4bc5-b48e-ab8b6ddf750c" containerName="placement-db-sync"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.377219 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4b06642-351e-4bc5-b48e-ab8b6ddf750c" containerName="placement-db-sync"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.378151 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.381365 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2j785"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.381842 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.381934 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.381938 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.382073 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.392040 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6498684f7d-hltrn"]
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.494940 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9v5z\" (UniqueName: \"kubernetes.io/projected/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-kube-api-access-c9v5z\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.494994 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-config-data\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.495065 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-public-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.495254 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-scripts\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.495335 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-logs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.495418 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-combined-ca-bundle\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.495554 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-internal-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.596988 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-logs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.597061 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-combined-ca-bundle\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.597646 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-logs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.597140 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-internal-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.598020 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9v5z\" (UniqueName: \"kubernetes.io/projected/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-kube-api-access-c9v5z\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.598064 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-config-data\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.598126 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-public-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.598182 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-scripts\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.602133 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-public-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.605577 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-scripts\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.605770 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-config-data\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.605788 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-combined-ca-bundle\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.615027 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-internal-tls-certs\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.626938 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9v5z\" (UniqueName: \"kubernetes.io/projected/599b1532-a5ae-4a98-bcc2-cc6a9d93cae3-kube-api-access-c9v5z\") pod \"placement-6498684f7d-hltrn\" (UID: \"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3\") " pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:09 crc kubenswrapper[4888]: I1201 19:52:09.715039 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.306714 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6498684f7d-hltrn"]
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.389965 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-58df6cb45-qjhmp" event={"ID":"a4b29995-f291-4e12-bfb1-fad0318b0416","Type":"ContainerStarted","Data":"d44988a650de12052d9581f3f41d4958a1b6647744ed703f35237f09783689e8"}
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.390026 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-58df6cb45-qjhmp" event={"ID":"a4b29995-f291-4e12-bfb1-fad0318b0416","Type":"ContainerStarted","Data":"e9d5600545241a7bab6a94c66a546cbda09c0fe36aa3325d2632d8eee543bed9"}
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.390384 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-58df6cb45-qjhmp"
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.400495 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6498684f7d-hltrn" event={"ID":"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3","Type":"ContainerStarted","Data":"599eb1c349f694ef1da3d919f74755999d411f30a15afae400e27c48642ac90c"}
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.429405 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" event={"ID":"df3749b5-b4ea-4296-8cde-88fcf8011abe","Type":"ContainerStarted","Data":"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b"}
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.429480 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-c88kt"
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.478757 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-58df6cb45-qjhmp" podStartSLOduration=4.478737522 podStartE2EDuration="4.478737522s" podCreationTimestamp="2025-12-01 19:52:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:10.439147194 +0000 UTC m=+1130.310177108" watchObservedRunningTime="2025-12-01 19:52:10.478737522 +0000 UTC m=+1130.349767436"
Dec 01 19:52:10 crc kubenswrapper[4888]: I1201 19:52:10.487758 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" podStartSLOduration=7.487738164 podStartE2EDuration="7.487738164s" podCreationTimestamp="2025-12-01 19:52:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:10.487012455 +0000 UTC m=+1130.358042369" watchObservedRunningTime="2025-12-01 19:52:10.487738164 +0000 UTC m=+1130.358768338"
Dec 01 19:52:11 crc kubenswrapper[4888]: I1201 19:52:11.448636 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6498684f7d-hltrn" event={"ID":"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3","Type":"ContainerStarted","Data":"9aa6a9a977631bcab997b4ad56ab653e341f3b8961e6abc6ff23f345700bf0a3"}
Dec 01 19:52:11 crc kubenswrapper[4888]: I1201 19:52:11.448980 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6498684f7d-hltrn" event={"ID":"599b1532-a5ae-4a98-bcc2-cc6a9d93cae3","Type":"ContainerStarted","Data":"f5ba393fc7291e1123e81478c09d0232519b52bb29716e0b7170381affc56b51"}
Dec 01 19:52:11 crc kubenswrapper[4888]: I1201 19:52:11.486881 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6498684f7d-hltrn" podStartSLOduration=2.486859945 podStartE2EDuration="2.486859945s" podCreationTimestamp="2025-12-01 19:52:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:11.472175597 +0000 UTC m=+1131.343205521" watchObservedRunningTime="2025-12-01 19:52:11.486859945 +0000 UTC m=+1131.357889859"
Dec 01 19:52:12 crc kubenswrapper[4888]: I1201 19:52:12.471345 4888 generic.go:334] "Generic (PLEG): container finished" podID="a9337539-212d-4ad9-9572-80712d40784d" containerID="b81cebe4ed41c65090024b191d59b7d1e00a2e5623fc1fbad0d03089f71776bf" exitCode=0
Dec 01 19:52:12 crc kubenswrapper[4888]: I1201 19:52:12.472155 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54q8p" event={"ID":"a9337539-212d-4ad9-9572-80712d40784d","Type":"ContainerDied","Data":"b81cebe4ed41c65090024b191d59b7d1e00a2e5623fc1fbad0d03089f71776bf"}
Dec 01 19:52:12 crc kubenswrapper[4888]: I1201 19:52:12.472186 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:12 crc kubenswrapper[4888]: I1201 19:52:12.472219 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6498684f7d-hltrn"
Dec 01 19:52:12 crc kubenswrapper[4888]: I1201 19:52:12.709709 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7d745c59d5-8h7kr"
Dec 01 19:52:13 crc kubenswrapper[4888]: I1201 19:52:13.515670 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7f76bc979c-6tnbl"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.184496 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.184840 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.203791 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.203848 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.251444 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.266124 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.282937 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.291202 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.498625 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.498671 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.499927 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.499944 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.508466 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-856c6474d8-q6nhf"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.508663 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-856c6474d8-q6nhf"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.606369 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-d8bccccd8-fw8bk"
Dec 01 19:52:14 crc kubenswrapper[4888]: I1201 19:52:14.607226 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-d8bccccd8-fw8bk"
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.680966 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54q8p"
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774118 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774204 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774241 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774336 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6c67\" (UniqueName: \"kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774391 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.774457 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys\") pod \"a9337539-212d-4ad9-9572-80712d40784d\" (UID: \"a9337539-212d-4ad9-9572-80712d40784d\") "
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.780463 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.783636 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67" (OuterVolumeSpecName: "kube-api-access-x6c67") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "kube-api-access-x6c67". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.799699 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.800939 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts" (OuterVolumeSpecName: "scripts") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.822252 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data" (OuterVolumeSpecName: "config-data") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.825470 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9337539-212d-4ad9-9572-80712d40784d" (UID: "a9337539-212d-4ad9-9572-80712d40784d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876472 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876519 4888 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876542 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876555 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876565 4888 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9337539-212d-4ad9-9572-80712d40784d-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:15 crc kubenswrapper[4888]: I1201 19:52:15.876578 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6c67\" (UniqueName: \"kubernetes.io/projected/a9337539-212d-4ad9-9572-80712d40784d-kube-api-access-x6c67\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.126090 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6666f788cc-5g6pk"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524202 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-54q8p" event={"ID":"a9337539-212d-4ad9-9572-80712d40784d","Type":"ContainerDied","Data":"544cddc5f5072475c64caadac2c73e6cd911835214bb4fb1945c77629948d28b"}
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524257 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="544cddc5f5072475c64caadac2c73e6cd911835214bb4fb1945c77629948d28b"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524262 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-54q8p"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524283 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524301 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524633 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.524769 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.831131 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6c8cb9cfb7-n54hp"]
Dec 01 19:52:16 crc kubenswrapper[4888]: E1201 19:52:16.832470 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9337539-212d-4ad9-9572-80712d40784d" containerName="keystone-bootstrap"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.832492 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9337539-212d-4ad9-9572-80712d40784d" containerName="keystone-bootstrap"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.833033 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9337539-212d-4ad9-9572-80712d40784d" containerName="keystone-bootstrap"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.841684 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.850978 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.851781 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.851973 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.852215 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-skcf5"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.852352 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.852477 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.872416 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c8cb9cfb7-n54hp"]
Dec 01 19:52:16 crc kubenswrapper[4888]: I1201 19:52:16.874657 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032172 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-config-data\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032242 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-scripts\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032263 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-internal-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032317 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-combined-ca-bundle\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032334 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-credential-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032352 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-fernet-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032388 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-public-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.032424 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzx9f\" (UniqueName: \"kubernetes.io/projected/6c61e8e0-f725-45ed-8a82-740b3243120d-kube-api-access-qzx9f\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.081875 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.082411 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.133686 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-config-data\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.133751 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-scripts\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.133784 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-internal-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.133860 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-combined-ca-bundle\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.133972 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-credential-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.134008 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-fernet-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.134063 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-public-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.134127 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzx9f\" (UniqueName: \"kubernetes.io/projected/6c61e8e0-f725-45ed-8a82-740b3243120d-kube-api-access-qzx9f\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.142571 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-scripts\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.156792 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-internal-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.174558 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-combined-ca-bundle\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.176708 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-public-tls-certs\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.179762 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-fernet-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.179968 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-config-data\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.180669 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzx9f\" (UniqueName: \"kubernetes.io/projected/6c61e8e0-f725-45ed-8a82-740b3243120d-kube-api-access-qzx9f\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.180785 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6c61e8e0-f725-45ed-8a82-740b3243120d-credential-keys\") pod \"keystone-6c8cb9cfb7-n54hp\" (UID: \"6c61e8e0-f725-45ed-8a82-740b3243120d\") " pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.209969 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c8cb9cfb7-n54hp"
Dec 01 19:52:17 crc kubenswrapper[4888]: E1201 19:52:17.231887 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache]"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.532691 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:52:17 crc kubenswrapper[4888]: I1201 19:52:17.744347 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 01 19:52:18 crc kubenswrapper[4888]: I1201 19:52:18.976504 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-c88kt"
Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.069287 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"]
Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.069598 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="dnsmasq-dns" containerID="cri-o://8acf1013fd4d03b9021d8bebebd562581de391dc1d0f51b4ed6dc95453ff340c" gracePeriod=10
Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.625523 4888 generic.go:334] "Generic (PLEG): container finished" podID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerID="8acf1013fd4d03b9021d8bebebd562581de391dc1d0f51b4ed6dc95453ff340c" exitCode=0
Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.625954 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerDied","Data":"8acf1013fd4d03b9021d8bebebd562581de391dc1d0f51b4ed6dc95453ff340c"}
Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.742124 4888 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.902782 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.902822 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.902894 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w9mm\" (UniqueName: \"kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.902954 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.902981 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.903019 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config\") pod \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\" (UID: \"a3dcfdef-7ab0-4e79-b56e-3d3536a60538\") " Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.911364 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm" (OuterVolumeSpecName: "kube-api-access-7w9mm") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "kube-api-access-7w9mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.968499 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config" (OuterVolumeSpecName: "config") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.975451 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.986411 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c8cb9cfb7-n54hp"] Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.988594 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:19 crc kubenswrapper[4888]: I1201 19:52:19.994623 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.006308 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.006338 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w9mm\" (UniqueName: \"kubernetes.io/projected/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-kube-api-access-7w9mm\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.006351 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.006361 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.006370 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.011830 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3dcfdef-7ab0-4e79-b56e-3d3536a60538" (UID: "a3dcfdef-7ab0-4e79-b56e-3d3536a60538"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.039348 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.039409 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.039482 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.040465 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.040524 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e" gracePeriod=600 Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.111107 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3dcfdef-7ab0-4e79-b56e-3d3536a60538-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.636989 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c8cb9cfb7-n54hp" event={"ID":"6c61e8e0-f725-45ed-8a82-740b3243120d","Type":"ContainerStarted","Data":"70da774c02753033bc137eaeaec682dd4513b8550d400742ff63421514a5f2a3"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.637320 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c8cb9cfb7-n54hp" event={"ID":"6c61e8e0-f725-45ed-8a82-740b3243120d","Type":"ContainerStarted","Data":"e928cb8887cc9fc1abb07575ba74086fa5d410f227dee1300acdec4371759443"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.637503 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6c8cb9cfb7-n54hp" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.638759 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" event={"ID":"a3dcfdef-7ab0-4e79-b56e-3d3536a60538","Type":"ContainerDied","Data":"e1c7e993c0b297f9fc9735b13f03274dc72873246ef446a20717ead68795580e"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.638792 4888 scope.go:117] "RemoveContainer" containerID="8acf1013fd4d03b9021d8bebebd562581de391dc1d0f51b4ed6dc95453ff340c" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.638901 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-67pd6" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.646624 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e" exitCode=0 Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.646723 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.646750 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.651485 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xk69h" event={"ID":"047835ab-4a66-4ff8-9252-c9c5ca0d0352","Type":"ContainerStarted","Data":"35e2de823012c9ae66ff9e1704d6fa1805e591772a3eeef43db4db76756c9c54"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.657795 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerStarted","Data":"415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.659558 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tnc9l" event={"ID":"e867ec9b-1972-4745-8dea-944cc62c6db5","Type":"ContainerStarted","Data":"21d149c86b7473dc1c67882266bd7044406705d15a6287d62e5733d7955d4b5f"} Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.670447 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6c8cb9cfb7-n54hp" podStartSLOduration=4.67041988 podStartE2EDuration="4.67041988s" podCreationTimestamp="2025-12-01 19:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:20.661717279 +0000 UTC m=+1140.532747193" watchObservedRunningTime="2025-12-01 19:52:20.67041988 +0000 UTC m=+1140.541449794" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.687940 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-tnc9l" podStartSLOduration=3.394077548 podStartE2EDuration="48.687914413s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="2025-12-01 19:51:34.196463086 +0000 UTC m=+1094.067493000" lastFinishedPulling="2025-12-01 19:52:19.490299951 +0000 UTC m=+1139.361329865" observedRunningTime="2025-12-01 19:52:20.684055402 +0000 UTC m=+1140.555085326" watchObservedRunningTime="2025-12-01 19:52:20.687914413 +0000 UTC m=+1140.558944327" Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.744141 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"] Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.754392 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-67pd6"] Dec 01 19:52:20 crc kubenswrapper[4888]: I1201 19:52:20.758680 4888 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/barbican-db-sync-xk69h" podStartSLOduration=3.467362518 podStartE2EDuration="48.75866285s" podCreationTimestamp="2025-12-01 19:51:32 +0000 UTC" firstStartedPulling="2025-12-01 19:51:34.196336382 +0000 UTC m=+1094.067366296" lastFinishedPulling="2025-12-01 19:52:19.487636714 +0000 UTC m=+1139.358666628" observedRunningTime="2025-12-01 19:52:20.744041639 +0000 UTC m=+1140.615071573" watchObservedRunningTime="2025-12-01 19:52:20.75866285 +0000 UTC m=+1140.629692764" Dec 01 19:52:21 crc kubenswrapper[4888]: I1201 19:52:21.093081 4888 scope.go:117] "RemoveContainer" containerID="dbf6fc0958d95528a55edf2df8c8def0e9328adbadc5e96e16bbcb3cb235cdf5" Dec 01 19:52:21 crc kubenswrapper[4888]: I1201 19:52:21.121117 4888 scope.go:117] "RemoveContainer" containerID="58584bf8bcac66e4a4cd4859748dc8d0de28a0b3189b80fe000dfedb264e50e0" Dec 01 19:52:22 crc kubenswrapper[4888]: I1201 19:52:22.469552 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" path="/var/lib/kubelet/pods/a3dcfdef-7ab0-4e79-b56e-3d3536a60538/volumes" Dec 01 19:52:24 crc kubenswrapper[4888]: I1201 19:52:24.510793 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 01 19:52:24 crc kubenswrapper[4888]: I1201 19:52:24.607778 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-d8bccccd8-fw8bk" podUID="5006252a-8f29-475c-9847-e2d6662ff13b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 01 19:52:24 crc kubenswrapper[4888]: I1201 19:52:24.730231 4888 generic.go:334] "Generic (PLEG): container finished" podID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" containerID="35e2de823012c9ae66ff9e1704d6fa1805e591772a3eeef43db4db76756c9c54" exitCode=0 Dec 01 19:52:24 crc kubenswrapper[4888]: I1201 19:52:24.730589 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xk69h" event={"ID":"047835ab-4a66-4ff8-9252-c9c5ca0d0352","Type":"ContainerDied","Data":"35e2de823012c9ae66ff9e1704d6fa1805e591772a3eeef43db4db76756c9c54"} Dec 01 19:52:26 crc kubenswrapper[4888]: I1201 19:52:26.761315 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tnc9l" event={"ID":"e867ec9b-1972-4745-8dea-944cc62c6db5","Type":"ContainerDied","Data":"21d149c86b7473dc1c67882266bd7044406705d15a6287d62e5733d7955d4b5f"} Dec 01 19:52:26 crc kubenswrapper[4888]: I1201 19:52:26.761262 4888 generic.go:334] "Generic (PLEG): container finished" podID="e867ec9b-1972-4745-8dea-944cc62c6db5" containerID="21d149c86b7473dc1c67882266bd7044406705d15a6287d62e5733d7955d4b5f" exitCode=0 Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.552317 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-xk69h" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.625970 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data\") pod \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.626014 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle\") pod \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.626049 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btz6w\" (UniqueName: \"kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w\") pod \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\" (UID: \"047835ab-4a66-4ff8-9252-c9c5ca0d0352\") " Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.635723 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "047835ab-4a66-4ff8-9252-c9c5ca0d0352" (UID: "047835ab-4a66-4ff8-9252-c9c5ca0d0352"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.667415 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w" (OuterVolumeSpecName: "kube-api-access-btz6w") pod "047835ab-4a66-4ff8-9252-c9c5ca0d0352" (UID: "047835ab-4a66-4ff8-9252-c9c5ca0d0352"). InnerVolumeSpecName "kube-api-access-btz6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.674127 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "047835ab-4a66-4ff8-9252-c9c5ca0d0352" (UID: "047835ab-4a66-4ff8-9252-c9c5ca0d0352"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.730425 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.730467 4888 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/047835ab-4a66-4ff8-9252-c9c5ca0d0352-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.730483 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btz6w\" (UniqueName: \"kubernetes.io/projected/047835ab-4a66-4ff8-9252-c9c5ca0d0352-kube-api-access-btz6w\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.793460 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xk69h" event={"ID":"047835ab-4a66-4ff8-9252-c9c5ca0d0352","Type":"ContainerDied","Data":"8afd10da97b986c38c5725978a02b5e46f5f6aaf0a2066797f35c557c3e342d5"} Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.793494 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8afd10da97b986c38c5725978a02b5e46f5f6aaf0a2066797f35c557c3e342d5" Dec 01 19:52:29 crc kubenswrapper[4888]: I1201 19:52:29.793543 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xk69h" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.892367 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-79c4fd898c-nlfgq"] Dec 01 19:52:30 crc kubenswrapper[4888]: E1201 19:52:30.893532 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="init" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.893550 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="init" Dec 01 19:52:30 crc kubenswrapper[4888]: E1201 19:52:30.893577 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" containerName="barbican-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.893584 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" containerName="barbican-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: E1201 19:52:30.893606 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="dnsmasq-dns" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.893613 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="dnsmasq-dns" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.893824 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" containerName="barbican-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.893839 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dcfdef-7ab0-4e79-b56e-3d3536a60538" containerName="dnsmasq-dns" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.895386 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.898928 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.906906 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-5fpcd" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.917509 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.918459 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.928506 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-tnc9l" event={"ID":"e867ec9b-1972-4745-8dea-944cc62c6db5","Type":"ContainerDied","Data":"c5b982a1403bc0c37df0593e682017ffa3e292c9fd8defb954050208b02d6b38"} Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.928590 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5b982a1403bc0c37df0593e682017ffa3e292c9fd8defb954050208b02d6b38" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.932194 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-8db6f5c5d-bb5x6"] Dec 01 19:52:30 crc kubenswrapper[4888]: E1201 19:52:30.932693 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" containerName="cinder-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.932708 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" containerName="cinder-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.932919 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" containerName="cinder-db-sync" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.938104 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.943512 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 01 19:52:30 crc kubenswrapper[4888]: I1201 19:52:30.963156 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-79c4fd898c-nlfgq"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.015273 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8db6f5c5d-bb5x6"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067486 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067533 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067562 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067730 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067866 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbf64\" (UniqueName: \"kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.067899 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id\") pod \"e867ec9b-1972-4745-8dea-944cc62c6db5\" (UID: \"e867ec9b-1972-4745-8dea-944cc62c6db5\") " Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068100 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-combined-ca-bundle\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068133 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " 
pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068169 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13de5904-4edb-417c-aa16-c1690ba7a828-logs\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068232 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-combined-ca-bundle\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068268 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm6db\" (UniqueName: \"kubernetes.io/projected/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-kube-api-access-pm6db\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068292 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068308 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data-custom\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068337 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6vwm\" (UniqueName: \"kubernetes.io/projected/13de5904-4edb-417c-aa16-c1690ba7a828-kube-api-access-f6vwm\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068368 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-logs\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.068414 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data-custom\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.077050 4888 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.079362 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64" (OuterVolumeSpecName: "kube-api-access-pbf64") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). InnerVolumeSpecName "kube-api-access-pbf64". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.079602 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts" (OuterVolumeSpecName: "scripts") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.093536 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.111226 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.112980 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.124252 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.132787 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.201766 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data-custom\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.201856 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-combined-ca-bundle\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.201903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.201949 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13de5904-4edb-417c-aa16-c1690ba7a828-logs\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.201977 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-combined-ca-bundle\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202021 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm6db\" (UniqueName: \"kubernetes.io/projected/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-kube-api-access-pm6db\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202059 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202093 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data-custom\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202136 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6vwm\" (UniqueName: 
\"kubernetes.io/projected/13de5904-4edb-417c-aa16-c1690ba7a828-kube-api-access-f6vwm\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202172 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-logs\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202288 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbf64\" (UniqueName: \"kubernetes.io/projected/e867ec9b-1972-4745-8dea-944cc62c6db5-kube-api-access-pbf64\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202304 4888 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e867ec9b-1972-4745-8dea-944cc62c6db5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202315 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202325 4888 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202337 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.202829 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-logs\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.204568 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13de5904-4edb-417c-aa16-c1690ba7a828-logs\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.213342 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-combined-ca-bundle\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.214096 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 
19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.222924 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data-custom\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.225988 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-combined-ca-bundle\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.251135 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm6db\" (UniqueName: \"kubernetes.io/projected/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-kube-api-access-pm6db\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.270688 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f-config-data-custom\") pod \"barbican-keystone-listener-8db6f5c5d-bb5x6\" (UID: \"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f\") " pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.285804 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.287517 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.291942 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.296417 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.297055 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6vwm\" (UniqueName: \"kubernetes.io/projected/13de5904-4edb-417c-aa16-c1690ba7a828-kube-api-access-f6vwm\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.303870 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.303985 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.304031 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.304055 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.304106 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.304129 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp9lk\" (UniqueName: \"kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.308788 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13de5904-4edb-417c-aa16-c1690ba7a828-config-data\") pod \"barbican-worker-79c4fd898c-nlfgq\" (UID: \"13de5904-4edb-417c-aa16-c1690ba7a828\") " pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.323351 4888 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.406870 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.407295 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.407447 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.407598 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.407720 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.407848 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.408082 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.409125 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.410904 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: 
\"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.413636 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.414260 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.414836 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp9lk\" (UniqueName: \"kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.418492 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.421813 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5khg6\" (UniqueName: \"kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.421891 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.423688 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.436869 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp9lk\" (UniqueName: \"kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk\") pod \"dnsmasq-dns-85ff748b95-w42nc\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.437177 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data" (OuterVolumeSpecName: "config-data") pod "e867ec9b-1972-4745-8dea-944cc62c6db5" (UID: "e867ec9b-1972-4745-8dea-944cc62c6db5"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524008 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5khg6\" (UniqueName: \"kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524098 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524140 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524211 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524379 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524535 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e867ec9b-1972-4745-8dea-944cc62c6db5-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.524714 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.533237 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.533338 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.537321 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.541988 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5khg6\" (UniqueName: \"kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6\") pod \"barbican-api-745445fc78-xrn2z\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.545827 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-79c4fd898c-nlfgq" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.693718 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.705859 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:31 crc kubenswrapper[4888]: I1201 19:52:31.953045 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-tnc9l" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.290670 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.293052 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.301143 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.301378 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.301493 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.301612 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-dkpkt" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.316648 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468437 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468527 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468552 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z29b\" (UniqueName: 
\"kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468571 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468594 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.468662 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.572226 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.572822 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.572859 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z29b\" (UniqueName: \"kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.572887 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.572919 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.573044 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" 
Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.574224 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.579032 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.582938 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.594669 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.628090 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.647759 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z29b\" (UniqueName: \"kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b\") pod \"cinder-scheduler-0\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.726075 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.726119 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.728034 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.728063 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.728861 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.729867 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.756297 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.759832 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.880574 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881020 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881062 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881086 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881107 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881146 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881165 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lhxz\" (UniqueName: \"kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881205 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881234 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881257 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881292 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mg6w\" (UniqueName: \"kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881498 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.881609 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.946273 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985104 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985162 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lhxz\" (UniqueName: \"kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985269 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985290 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985318 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985368 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mg6w\" (UniqueName: \"kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985400 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985442 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985505 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985540 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985590 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985615 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.985641 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.987432 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.987518 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.988426 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.989021 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.989733 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.990327 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " 
pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:32 crc kubenswrapper[4888]: I1201 19:52:32.991914 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.004945 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.012569 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.018181 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.023306 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lhxz\" (UniqueName: \"kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.025074 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mg6w\" (UniqueName: \"kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w\") pod \"dnsmasq-dns-5c9776ccc5-ffc2d\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.034862 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts\") pod \"cinder-api-0\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: E1201 19:52:33.150194 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.150280 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.297083 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.415454 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.635365 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-79c4fd898c-nlfgq"] Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.646047 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.704308 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8db6f5c5d-bb5x6"] Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.882722 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:33 crc kubenswrapper[4888]: I1201 19:52:33.984092 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.117406 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.178735 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerStarted","Data":"6fd10254dab6b7c9b9902c990b6f3ba9832d62c882a12179e1607057955ad95f"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.180164 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerStarted","Data":"71c4740962bb7af9aa92dcf0b5c307f603dc53c0167f529f5ef9be5d77ab81ce"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.184819 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" event={"ID":"d2f13dac-63ca-4642-a12a-ebf2e6d344ed","Type":"ContainerStarted","Data":"840bb35cea264c8a7856214e26ec6d175faa8d641b04236c695f07623d09f03f"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.187205 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79c4fd898c-nlfgq" event={"ID":"13de5904-4edb-417c-aa16-c1690ba7a828","Type":"ContainerStarted","Data":"d136451435fe87bf35f1ce2d73f9896978371e474d8af2ad7b1334fc1f512938"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.197308 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" event={"ID":"e325858d-ad0b-40ca-8cb1-5b2b14bdc908","Type":"ContainerStarted","Data":"657a5dcef87ec0dafff1dee266fe9b720348c3ffe50e0076b5aebd7bde671c74"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.219544 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" event={"ID":"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f","Type":"ContainerStarted","Data":"79f546e210d137983a940097695fdb276808803fc232522542bbcfe5043c82e3"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.225999 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerStarted","Data":"37038d0d766494b4ca0a65619d56d5d2fea5d8a61981f13a35593ff44612cd62"} Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.226174 4888 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/ceilometer-0" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="ceilometer-notification-agent" containerID="cri-o://0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8" gracePeriod=30 Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.226476 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.226778 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="proxy-httpd" containerID="cri-o://37038d0d766494b4ca0a65619d56d5d2fea5d8a61981f13a35593ff44612cd62" gracePeriod=30 Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.226835 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="sg-core" containerID="cri-o://415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c" gracePeriod=30 Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.331488 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.509966 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.609094 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-d8bccccd8-fw8bk" podUID="5006252a-8f29-475c-9847-e2d6662ff13b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 01 19:52:34 crc kubenswrapper[4888]: I1201 19:52:34.997691 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.293837 4888 generic.go:334] "Generic (PLEG): container finished" podID="d2f13dac-63ca-4642-a12a-ebf2e6d344ed" containerID="169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999" exitCode=0 Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.294598 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" event={"ID":"d2f13dac-63ca-4642-a12a-ebf2e6d344ed","Type":"ContainerDied","Data":"169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.322068 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerStarted","Data":"cf0222710c6a7bf713a630599720626ce9d5839395658ab615d3a97cb234faaf"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.352070 4888 generic.go:334] "Generic (PLEG): container finished" podID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerID="b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2" exitCode=0 Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.352244 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" 
event={"ID":"e325858d-ad0b-40ca-8cb1-5b2b14bdc908","Type":"ContainerDied","Data":"b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.387220 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerDied","Data":"37038d0d766494b4ca0a65619d56d5d2fea5d8a61981f13a35593ff44612cd62"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.387270 4888 generic.go:334] "Generic (PLEG): container finished" podID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerID="37038d0d766494b4ca0a65619d56d5d2fea5d8a61981f13a35593ff44612cd62" exitCode=0 Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.387291 4888 generic.go:334] "Generic (PLEG): container finished" podID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerID="415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c" exitCode=2 Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.388803 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerDied","Data":"415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.397871 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerStarted","Data":"7299a937fe97cbc2ce31bd3aa342161fee45f0a040f1898e5d4644bedb568490"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.397946 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerStarted","Data":"cc690ce78641323b443479779f18a10a264628d0bc41dd7f95d0884cd4ed98f2"} Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.399349 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.399450 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:35 crc kubenswrapper[4888]: I1201 19:52:35.452927 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-745445fc78-xrn2z" podStartSLOduration=4.452905987 podStartE2EDuration="4.452905987s" podCreationTimestamp="2025-12-01 19:52:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:35.446951686 +0000 UTC m=+1155.317981620" watchObservedRunningTime="2025-12-01 19:52:35.452905987 +0000 UTC m=+1155.323935901" Dec 01 19:52:36 crc kubenswrapper[4888]: W1201 19:52:36.271932 4888 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2f13dac_63ca_4642_a12a_ebf2e6d344ed.slice/crio-conmon-169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2f13dac_63ca_4642_a12a_ebf2e6d344ed.slice/crio-conmon-169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999.scope: no such file or directory Dec 01 19:52:36 crc kubenswrapper[4888]: W1201 19:52:36.296530 4888 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2f13dac_63ca_4642_a12a_ebf2e6d344ed.slice/crio-169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2f13dac_63ca_4642_a12a_ebf2e6d344ed.slice/crio-169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999.scope: no such file or directory Dec 01 19:52:36 crc kubenswrapper[4888]: W1201 19:52:36.301446 4888 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode325858d_ad0b_40ca_8cb1_5b2b14bdc908.slice/crio-conmon-b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode325858d_ad0b_40ca_8cb1_5b2b14bdc908.slice/crio-conmon-b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2.scope: no such file or directory Dec 01 19:52:36 crc kubenswrapper[4888]: W1201 19:52:36.333687 4888 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode325858d_ad0b_40ca_8cb1_5b2b14bdc908.slice/crio-b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode325858d_ad0b_40ca_8cb1_5b2b14bdc908.slice/crio-b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2.scope: no such file or directory Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.421975 4888 generic.go:334] "Generic (PLEG): container finished" podID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerID="31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.422015 4888 generic.go:334] "Generic (PLEG): container finished" podID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerID="c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.422095 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerDied","Data":"31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.422131 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerDied","Data":"c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.425703 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerStarted","Data":"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.430460 4888 generic.go:334] "Generic (PLEG): container finished" podID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerID="0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8" exitCode=0 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.430504 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerDied","Data":"0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.436944 4888 generic.go:334] "Generic (PLEG): container finished" podID="95ea9163-ee66-4f38-a239-4c6592864892" containerID="ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.437114 4888 generic.go:334] "Generic (PLEG): container finished" podID="95ea9163-ee66-4f38-a239-4c6592864892" containerID="4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.437250 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerDied","Data":"ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.437325 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerDied","Data":"4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.442846 4888 generic.go:334] "Generic (PLEG): container finished" podID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerID="c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.442988 4888 generic.go:334] "Generic (PLEG): container finished" podID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerID="f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4" exitCode=137 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.443103 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerDied","Data":"c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.443176 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerDied","Data":"f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4"} Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.452137 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerStarted","Data":"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a"} Dec 01 19:52:36 crc kubenswrapper[4888]: E1201 19:52:36.597906 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode867ec9b_1972_4745_8dea_944cc62c6db5.slice/crio-c5b982a1403bc0c37df0593e682017ffa3e292c9fd8defb954050208b02d6b38\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9a19cb_f999_49ca_89f8_e5ab13e453a4.slice/crio-415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95ea9163_ee66_4f38_a239_4c6592864892.slice/crio-conmon-4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf738f310_2f55_4a77_b9e9_b654891b3ef0.slice/crio-c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f7bb90_fce7_4f77_a9d0_eaa5089e4c6a.slice/crio-c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95ea9163_ee66_4f38_a239_4c6592864892.slice/crio-4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f7bb90_fce7_4f77_a9d0_eaa5089e4c6a.slice/crio-conmon-31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf738f310_2f55_4a77_b9e9_b654891b3ef0.slice/crio-f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95ea9163_ee66_4f38_a239_4c6592864892.slice/crio-conmon-ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf738f310_2f55_4a77_b9e9_b654891b3ef0.slice/crio-conmon-c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9a19cb_f999_49ca_89f8_e5ab13e453a4.slice/crio-conmon-415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95ea9163_ee66_4f38_a239_4c6592864892.slice/crio-ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f794204_9db5_498d_b8a5_586ec3b9f921.slice/crio-49dd3b904763d00227e561fdc08020aff3faec0b6507d3a630e185063d903e7b\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71f7bb90_fce7_4f77_a9d0_eaa5089e4c6a.slice/crio-31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9a19cb_f999_49ca_89f8_e5ab13e453a4.slice/crio-0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9a19cb_f999_49ca_89f8_e5ab13e453a4.slice/crio-conmon-0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8.scope\": RecentStats: unable to find 
data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode867ec9b_1972_4745_8dea_944cc62c6db5.slice\": RecentStats: unable to find data in memory cache]" Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.635556 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-58df6cb45-qjhmp" Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.744289 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7894c9f948-wgblk"] Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.745100 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7894c9f948-wgblk" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-httpd" containerID="cri-o://82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1" gracePeriod=30 Dec 01 19:52:36 crc kubenswrapper[4888]: I1201 19:52:36.744519 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7894c9f948-wgblk" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-api" containerID="cri-o://b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7" gracePeriod=30 Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.065152 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171088 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp9lk\" (UniqueName: \"kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171567 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171619 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171722 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171875 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.171914 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb\") pod \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\" (UID: \"d2f13dac-63ca-4642-a12a-ebf2e6d344ed\") " Dec 01 
19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.182319 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk" (OuterVolumeSpecName: "kube-api-access-wp9lk") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "kube-api-access-wp9lk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.204584 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.211979 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.227298 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.232178 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config" (OuterVolumeSpecName: "config") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.248136 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d2f13dac-63ca-4642-a12a-ebf2e6d344ed" (UID: "d2f13dac-63ca-4642-a12a-ebf2e6d344ed"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274084 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274117 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274130 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274143 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274154 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.274165 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp9lk\" (UniqueName: \"kubernetes.io/projected/d2f13dac-63ca-4642-a12a-ebf2e6d344ed-kube-api-access-wp9lk\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.507960 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.509804 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-w42nc" event={"ID":"d2f13dac-63ca-4642-a12a-ebf2e6d344ed","Type":"ContainerDied","Data":"840bb35cea264c8a7856214e26ec6d175faa8d641b04236c695f07623d09f03f"} Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.509894 4888 scope.go:117] "RemoveContainer" containerID="169fa1e700f3aa87d2de50217419d09f49b8cff8b16aafa513f7350188945999" Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.521442 4888 generic.go:334] "Generic (PLEG): container finished" podID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerID="82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1" exitCode=0 Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.521483 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerDied","Data":"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1"} Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.592010 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:37 crc kubenswrapper[4888]: I1201 19:52:37.611588 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-w42nc"] Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.237140 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-75669cfdf8-fmlxq"] Dec 01 19:52:38 crc kubenswrapper[4888]: E1201 19:52:38.237774 4888 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d2f13dac-63ca-4642-a12a-ebf2e6d344ed" containerName="init" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.237791 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2f13dac-63ca-4642-a12a-ebf2e6d344ed" containerName="init" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.237998 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2f13dac-63ca-4642-a12a-ebf2e6d344ed" containerName="init" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.240032 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.245399 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.245624 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.272137 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75669cfdf8-fmlxq"] Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436305 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436355 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-internal-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436378 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-combined-ca-bundle\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436458 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data-custom\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436508 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-public-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436525 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a887275f-f805-4f46-962b-d54149803ab3-logs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " 
pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.436547 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlm4f\" (UniqueName: \"kubernetes.io/projected/a887275f-f805-4f46-962b-d54149803ab3-kube-api-access-rlm4f\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.476607 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2f13dac-63ca-4642-a12a-ebf2e6d344ed" path="/var/lib/kubelet/pods/d2f13dac-63ca-4642-a12a-ebf2e6d344ed/volumes" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.540794 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541443 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data-custom\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541522 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-public-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541554 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a887275f-f805-4f46-962b-d54149803ab3-logs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541580 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlm4f\" (UniqueName: \"kubernetes.io/projected/a887275f-f805-4f46-962b-d54149803ab3-kube-api-access-rlm4f\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541628 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541643 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-internal-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.541663 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-combined-ca-bundle\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: 
\"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.543699 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a887275f-f805-4f46-962b-d54149803ab3-logs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.554613 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-internal-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.559046 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data-custom\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.560458 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-config-data\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.571226 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-public-tls-certs\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.580077 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7f76bc979c-6tnbl" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.597000 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlm4f\" (UniqueName: \"kubernetes.io/projected/a887275f-f805-4f46-962b-d54149803ab3-kube-api-access-rlm4f\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.600056 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a887275f-f805-4f46-962b-d54149803ab3-combined-ca-bundle\") pod \"barbican-api-75669cfdf8-fmlxq\" (UID: \"a887275f-f805-4f46-962b-d54149803ab3\") " pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.602467 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce9a19cb-f999-49ca-89f8-e5ab13e453a4","Type":"ContainerDied","Data":"11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b"} Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.602513 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11d0e1ab7e4a076e8ee081be85e366f5c39a9a4f6d4b71dbbef05f8b1c6fbe2b" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.662940 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key\") pod \"95ea9163-ee66-4f38-a239-4c6592864892\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.663012 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts\") pod \"95ea9163-ee66-4f38-a239-4c6592864892\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.663076 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs\") pod \"95ea9163-ee66-4f38-a239-4c6592864892\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.663165 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c8z4\" (UniqueName: \"kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4\") pod \"95ea9163-ee66-4f38-a239-4c6592864892\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.663193 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data\") pod \"95ea9163-ee66-4f38-a239-4c6592864892\" (UID: \"95ea9163-ee66-4f38-a239-4c6592864892\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.667337 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs" (OuterVolumeSpecName: "logs") pod "95ea9163-ee66-4f38-a239-4c6592864892" (UID: "95ea9163-ee66-4f38-a239-4c6592864892"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.684597 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.701886 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4" (OuterVolumeSpecName: "kube-api-access-9c8z4") pod "95ea9163-ee66-4f38-a239-4c6592864892" (UID: "95ea9163-ee66-4f38-a239-4c6592864892"). InnerVolumeSpecName "kube-api-access-9c8z4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.702697 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "95ea9163-ee66-4f38-a239-4c6592864892" (UID: "95ea9163-ee66-4f38-a239-4c6592864892"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.722304 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.722465 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7d745c59d5-8h7kr" event={"ID":"95ea9163-ee66-4f38-a239-4c6592864892","Type":"ContainerDied","Data":"45212ede3d182e371b32064366b11462a16cd097822fde25c8725d0ca263f562"} Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.722498 4888 scope.go:117] "RemoveContainer" containerID="ae8c28028dfa379f34e1b48728a6ff5d61ea629739eb2267a5d6357d33a40e4b" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.722570 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7d745c59d5-8h7kr" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.726418 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6666f788cc-5g6pk" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.726586 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6666f788cc-5g6pk" event={"ID":"f738f310-2f55-4a77-b9e9-b654891b3ef0","Type":"ContainerDied","Data":"11ac44600abd653e7fd8b69c21f0da29c57c01e64185316c12d99ee7c6e44b05"} Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766268 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766698 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766731 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key\") pod \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766751 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbkzn\" (UniqueName: \"kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766772 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs\") pod \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766808 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts\") pod \"f738f310-2f55-4a77-b9e9-b654891b3ef0\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766846 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data\") pod \"f738f310-2f55-4a77-b9e9-b654891b3ef0\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766866 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data\") pod \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") " Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766906 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") " Dec 01 19:52:38 crc 
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.766972 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767012 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle\") pod \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\" (UID: \"ce9a19cb-f999-49ca-89f8-e5ab13e453a4\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767029 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pkj9\" (UniqueName: \"kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9\") pod \"f738f310-2f55-4a77-b9e9-b654891b3ef0\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767045 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts\") pod \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767066 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7svrf\" (UniqueName: \"kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf\") pod \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\" (UID: \"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767082 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs\") pod \"f738f310-2f55-4a77-b9e9-b654891b3ef0\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767100 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key\") pod \"f738f310-2f55-4a77-b9e9-b654891b3ef0\" (UID: \"f738f310-2f55-4a77-b9e9-b654891b3ef0\") "
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767440 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c8z4\" (UniqueName: \"kubernetes.io/projected/95ea9163-ee66-4f38-a239-4c6592864892-kube-api-access-9c8z4\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767456 4888 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95ea9163-ee66-4f38-a239-4c6592864892-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.767466 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95ea9163-ee66-4f38-a239-4c6592864892-logs\") on node \"crc\" DevicePath \"\""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.772788 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.775426 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f76bc979c-6tnbl" event={"ID":"71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a","Type":"ContainerDied","Data":"26b42ecf1e757466771ad9b17d813c2f73e0e2849aa0e90695b4ec226ab4afb7"}
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.775528 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f76bc979c-6tnbl"
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.778666 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data" (OuterVolumeSpecName: "config-data") pod "95ea9163-ee66-4f38-a239-4c6592864892" (UID: "95ea9163-ee66-4f38-a239-4c6592864892"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.779858 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs" (OuterVolumeSpecName: "logs") pod "f738f310-2f55-4a77-b9e9-b654891b3ef0" (UID: "f738f310-2f55-4a77-b9e9-b654891b3ef0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.784484 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs" (OuterVolumeSpecName: "logs") pod "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" (UID: "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.784610 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.819763 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn" (OuterVolumeSpecName: "kube-api-access-hbkzn") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "kube-api-access-hbkzn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.821670 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9" (OuterVolumeSpecName: "kube-api-access-5pkj9") pod "f738f310-2f55-4a77-b9e9-b654891b3ef0" (UID: "f738f310-2f55-4a77-b9e9-b654891b3ef0"). InnerVolumeSpecName "kube-api-access-5pkj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
InnerVolumeSpecName "kube-api-access-5pkj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.821882 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf" (OuterVolumeSpecName: "kube-api-access-7svrf") pod "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" (UID: "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a"). InnerVolumeSpecName "kube-api-access-7svrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.852970 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f738f310-2f55-4a77-b9e9-b654891b3ef0" (UID: "f738f310-2f55-4a77-b9e9-b654891b3ef0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.854729 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" (UID: "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.854492 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts" (OuterVolumeSpecName: "scripts") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874247 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874455 4888 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874569 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbkzn\" (UniqueName: \"kubernetes.io/projected/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-kube-api-access-hbkzn\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874624 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874675 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874724 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874775 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874826 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pkj9\" (UniqueName: \"kubernetes.io/projected/f738f310-2f55-4a77-b9e9-b654891b3ef0-kube-api-access-5pkj9\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874880 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7svrf\" (UniqueName: \"kubernetes.io/projected/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-kube-api-access-7svrf\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874931 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f738f310-2f55-4a77-b9e9-b654891b3ef0-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.874980 4888 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f738f310-2f55-4a77-b9e9-b654891b3ef0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:38 crc kubenswrapper[4888]: I1201 19:52:38.915832 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.053993 4888 scope.go:117] "RemoveContainer" containerID="4f64560d2cdc062ad55f338f8ee2d5fbae45e85003890060de1ff95fcafa496b" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.103954 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts" (OuterVolumeSpecName: "scripts") pod "95ea9163-ee66-4f38-a239-4c6592864892" (UID: "95ea9163-ee66-4f38-a239-4c6592864892"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.135432 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts" (OuterVolumeSpecName: "scripts") pod "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" (UID: "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.194838 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95ea9163-ee66-4f38-a239-4c6592864892-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.195272 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.205441 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data" (OuterVolumeSpecName: "config-data") pod "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" (UID: "71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.215022 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data" (OuterVolumeSpecName: "config-data") pod "f738f310-2f55-4a77-b9e9-b654891b3ef0" (UID: "f738f310-2f55-4a77-b9e9-b654891b3ef0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.224047 4888 scope.go:117] "RemoveContainer" containerID="c0e8fcb843775bccd8a07a61c30fbcf768325177ed8105b316b10d46f17be9a7" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.224627 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts" (OuterVolumeSpecName: "scripts") pod "f738f310-2f55-4a77-b9e9-b654891b3ef0" (UID: "f738f310-2f55-4a77-b9e9-b654891b3ef0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.227429 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.296693 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.296720 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f738f310-2f55-4a77-b9e9-b654891b3ef0-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.296730 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.296739 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.298323 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data" (OuterVolumeSpecName: "config-data") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.310535 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce9a19cb-f999-49ca-89f8-e5ab13e453a4" (UID: "ce9a19cb-f999-49ca-89f8-e5ab13e453a4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.399255 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.399742 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9a19cb-f999-49ca-89f8-e5ab13e453a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.603845 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.650469 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6666f788cc-5g6pk"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.691938 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.695778 4888 scope.go:117] "RemoveContainer" containerID="f56a3cddbddb27b668149d25c95107ba1a7dbd594ef4b01a3d0783ebfeab43c4" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.730015 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7f76bc979c-6tnbl"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.746860 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.753753 4888 scope.go:117] "RemoveContainer" containerID="31fd18a5cf440dd28b6c95712f4879728043beea4825ccfd1f6ebd7edf04308a" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.778799 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7d745c59d5-8h7kr"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.828298 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75669cfdf8-fmlxq"] Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.832669 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api-log" containerID="cri-o://928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197" gracePeriod=30 Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.833050 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerStarted","Data":"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce"} Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.833103 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.833582 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api" containerID="cri-o://ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce" gracePeriod=30 Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.848702 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" event={"ID":"e325858d-ad0b-40ca-8cb1-5b2b14bdc908","Type":"ContainerStarted","Data":"fbd5c0ef319cc65056c2b8875f83836d53a7047c616e4eae7943fbcf89d5c736"} Dec 01 19:52:39 crc 
kubenswrapper[4888]: I1201 19:52:39.849839 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.873606 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" event={"ID":"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f","Type":"ContainerStarted","Data":"d3fb60c998af483f786b0b70cf4a6f2b1d373b41558357a2e748ccdf285b99cc"} Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.873742 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.865184403 podStartE2EDuration="7.865184403s" podCreationTimestamp="2025-12-01 19:52:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:39.862099064 +0000 UTC m=+1159.733128998" watchObservedRunningTime="2025-12-01 19:52:39.865184403 +0000 UTC m=+1159.736214317" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.897838 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" podStartSLOduration=7.8978125420000005 podStartE2EDuration="7.897812542s" podCreationTimestamp="2025-12-01 19:52:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:39.886771704 +0000 UTC m=+1159.757801628" watchObservedRunningTime="2025-12-01 19:52:39.897812542 +0000 UTC m=+1159.768842466" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.956838 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerStarted","Data":"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190"} Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.968037 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.968168 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79c4fd898c-nlfgq" event={"ID":"13de5904-4edb-417c-aa16-c1690ba7a828","Type":"ContainerStarted","Data":"677cbaf029f11e07c8d8f1458bf3ef1e6445a359381563a32e83bf0309db947c"} Dec 01 19:52:39 crc kubenswrapper[4888]: I1201 19:52:39.994554 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.819265977 podStartE2EDuration="7.994531006s" podCreationTimestamp="2025-12-01 19:52:32 +0000 UTC" firstStartedPulling="2025-12-01 19:52:33.942731898 +0000 UTC m=+1153.813761822" lastFinishedPulling="2025-12-01 19:52:35.117996937 +0000 UTC m=+1154.989026851" observedRunningTime="2025-12-01 19:52:39.975792567 +0000 UTC m=+1159.846822511" watchObservedRunningTime="2025-12-01 19:52:39.994531006 +0000 UTC m=+1159.865560920" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.038735 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-79c4fd898c-nlfgq" podStartSLOduration=5.329503636 podStartE2EDuration="10.038713638s" podCreationTimestamp="2025-12-01 19:52:30 +0000 UTC" firstStartedPulling="2025-12-01 19:52:33.649135797 +0000 UTC m=+1153.520165711" lastFinishedPulling="2025-12-01 19:52:38.358345799 +0000 UTC m=+1158.229375713" observedRunningTime="2025-12-01 19:52:40.004713979 +0000 UTC m=+1159.875743893" watchObservedRunningTime="2025-12-01 19:52:40.038713638 +0000 UTC m=+1159.909743552" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.063369 4888 scope.go:117] "RemoveContainer" containerID="c2f8d822be3e5e79dbb55e692d401f3dfa43adcee668629812b620f656f43ca9" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.069365 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.081105 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.115472 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.115901 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="ceilometer-notification-agent" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.115921 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="ceilometer-notification-agent" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.115933 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.115939 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.115953 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.115960 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.115979 4888 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.115987 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.116003 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116011 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.116022 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="proxy-httpd" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116028 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="proxy-httpd" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.116042 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="sg-core" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116047 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="sg-core" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.116058 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116063 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: E1201 19:52:40.116078 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116085 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116275 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="sg-core" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116290 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116301 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116310 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116323 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="ceilometer-notification-agent" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116334 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon-log" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116346 4888 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="95ea9163-ee66-4f38-a239-4c6592864892" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116357 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" containerName="horizon" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.116365 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" containerName="proxy-httpd" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.118342 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.134479 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.135822 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.144822 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246208 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246283 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246308 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246335 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246374 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246404 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgk2w\" (UniqueName: \"kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.246462 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348365 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348459 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348486 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348524 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348580 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348684 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgk2w\" (UniqueName: \"kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.348760 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.349926 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.350272 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.361402 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.361782 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.365110 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.382184 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.387376 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgk2w\" (UniqueName: \"kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w\") pod \"ceilometer-0\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.471092 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a" path="/var/lib/kubelet/pods/71f7bb90-fce7-4f77-a9d0-eaa5089e4c6a/volumes" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.472276 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95ea9163-ee66-4f38-a239-4c6592864892" path="/var/lib/kubelet/pods/95ea9163-ee66-4f38-a239-4c6592864892/volumes" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.473034 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce9a19cb-f999-49ca-89f8-e5ab13e453a4" path="/var/lib/kubelet/pods/ce9a19cb-f999-49ca-89f8-e5ab13e453a4/volumes" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.474754 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f738f310-2f55-4a77-b9e9-b654891b3ef0" path="/var/lib/kubelet/pods/f738f310-2f55-4a77-b9e9-b654891b3ef0/volumes" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.482975 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.983585 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" event={"ID":"af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f","Type":"ContainerStarted","Data":"500327cdc34b9ef6c6b69c5da5dec0b0633ac111ce3094ba66dee0fd1d60b8e2"} Dec 01 19:52:40 crc kubenswrapper[4888]: I1201 19:52:40.992729 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-79c4fd898c-nlfgq" event={"ID":"13de5904-4edb-417c-aa16-c1690ba7a828","Type":"ContainerStarted","Data":"f52a20cf65dd10bd1cfdec8940f1942dfd5fd88b6b714989f5016020fbd08ea1"} Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.002725 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75669cfdf8-fmlxq" event={"ID":"a887275f-f805-4f46-962b-d54149803ab3","Type":"ContainerStarted","Data":"8537188e49faa0969f5c590fde6b1cc60ce903673fd4ce1f636d9109e4c6cf43"} Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.002773 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75669cfdf8-fmlxq" event={"ID":"a887275f-f805-4f46-962b-d54149803ab3","Type":"ContainerStarted","Data":"a9668631934e18ce423af2d338c65ccb732a2c989522f099e0cd235de8b12fc8"} Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.002784 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75669cfdf8-fmlxq" event={"ID":"a887275f-f805-4f46-962b-d54149803ab3","Type":"ContainerStarted","Data":"8dbb333dc61fe4f93444b3f8caf821ddd4ca5a7e0fe00ca5e2007ff3c62402db"} Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.003491 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.003512 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.004001 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-8db6f5c5d-bb5x6" podStartSLOduration=6.384087942 podStartE2EDuration="11.003980593s" podCreationTimestamp="2025-12-01 19:52:30 +0000 UTC" firstStartedPulling="2025-12-01 19:52:33.739500718 +0000 UTC m=+1153.610530632" lastFinishedPulling="2025-12-01 19:52:38.359393369 +0000 UTC m=+1158.230423283" observedRunningTime="2025-12-01 19:52:40.999587896 +0000 UTC m=+1160.870617800" watchObservedRunningTime="2025-12-01 19:52:41.003980593 +0000 UTC m=+1160.875010507" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.006320 4888 generic.go:334] "Generic (PLEG): container finished" podID="aa098f68-2731-4769-a237-6b568758e588" containerID="928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197" exitCode=143 Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.007007 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerDied","Data":"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197"} Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.033527 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-75669cfdf8-fmlxq" podStartSLOduration=3.033486702 podStartE2EDuration="3.033486702s" podCreationTimestamp="2025-12-01 19:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:41.02749634 +0000 UTC m=+1160.898526254" watchObservedRunningTime="2025-12-01 19:52:41.033486702 +0000 UTC m=+1160.904516616" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.160325 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:41 crc kubenswrapper[4888]: W1201 19:52:41.168474 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd5df4c6_7032_4503_b030_f258e37e6c47.slice/crio-fc98f88831a0d1e29f297ff8536f34222affdd779c111fc5a9c44e2e6f94ff89 WatchSource:0}: Error finding container fc98f88831a0d1e29f297ff8536f34222affdd779c111fc5a9c44e2e6f94ff89: Status 404 returned error can't find the container with id fc98f88831a0d1e29f297ff8536f34222affdd779c111fc5a9c44e2e6f94ff89 Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.643767 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6498684f7d-hltrn" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.644739 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6498684f7d-hltrn" Dec 01 19:52:41 crc kubenswrapper[4888]: I1201 19:52:41.971382 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.039212 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerStarted","Data":"fc98f88831a0d1e29f297ff8536f34222affdd779c111fc5a9c44e2e6f94ff89"} Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.054547 4888 generic.go:334] "Generic (PLEG): container finished" podID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerID="b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7" exitCode=0 Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.057303 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7894c9f948-wgblk" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.058334 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerDied","Data":"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7"} Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.058435 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7894c9f948-wgblk" event={"ID":"6cff0163-3bf9-4979-90e1-cb5ee39ad044","Type":"ContainerDied","Data":"f515790c70d1eead67fe9252c7b6223092857b895f50d02c76a3c3fc28b6092c"} Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.058505 4888 scope.go:117] "RemoveContainer" containerID="82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.114246 4888 scope.go:117] "RemoveContainer" containerID="b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.121646 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlll8\" (UniqueName: \"kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8\") pod \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.121741 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs\") pod \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.121764 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle\") pod \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.121822 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config\") pod \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.121915 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config\") pod \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\" (UID: \"6cff0163-3bf9-4979-90e1-cb5ee39ad044\") " Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.143364 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6cff0163-3bf9-4979-90e1-cb5ee39ad044" (UID: "6cff0163-3bf9-4979-90e1-cb5ee39ad044"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.143429 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8" (OuterVolumeSpecName: "kube-api-access-hlll8") pod "6cff0163-3bf9-4979-90e1-cb5ee39ad044" (UID: "6cff0163-3bf9-4979-90e1-cb5ee39ad044"). InnerVolumeSpecName "kube-api-access-hlll8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.166445 4888 scope.go:117] "RemoveContainer" containerID="82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1" Dec 01 19:52:42 crc kubenswrapper[4888]: E1201 19:52:42.167785 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1\": container with ID starting with 82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1 not found: ID does not exist" containerID="82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.167831 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1"} err="failed to get container status \"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1\": rpc error: code = NotFound desc = could not find container \"82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1\": container with ID starting with 82e319daaf9619b30178ba126975c8090b615720eac6f8bf8ccece264f5d7ec1 not found: ID does not exist" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.167858 4888 scope.go:117] "RemoveContainer" containerID="b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7" Dec 01 19:52:42 crc kubenswrapper[4888]: E1201 19:52:42.168270 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7\": container with ID starting with b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7 not found: ID does not exist" containerID="b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.168290 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7"} err="failed to get container status \"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7\": rpc error: code = NotFound desc = could not find container \"b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7\": container with ID starting with b8d234ed279fcded800d3c0da9ed8e9342423528fae3d9206696b6c83e6facb7 not found: ID does not exist" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.226252 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlll8\" (UniqueName: \"kubernetes.io/projected/6cff0163-3bf9-4979-90e1-cb5ee39ad044-kube-api-access-hlll8\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.226292 4888 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:42 crc 
kubenswrapper[4888]: I1201 19:52:42.247391 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config" (OuterVolumeSpecName: "config") pod "6cff0163-3bf9-4979-90e1-cb5ee39ad044" (UID: "6cff0163-3bf9-4979-90e1-cb5ee39ad044"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.264276 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6cff0163-3bf9-4979-90e1-cb5ee39ad044" (UID: "6cff0163-3bf9-4979-90e1-cb5ee39ad044"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.283593 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6cff0163-3bf9-4979-90e1-cb5ee39ad044" (UID: "6cff0163-3bf9-4979-90e1-cb5ee39ad044"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.330230 4888 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.330562 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.330734 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6cff0163-3bf9-4979-90e1-cb5ee39ad044-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.421040 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7894c9f948-wgblk"] Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.428723 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7894c9f948-wgblk"] Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.463217 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" path="/var/lib/kubelet/pods/6cff0163-3bf9-4979-90e1-cb5ee39ad044/volumes" Dec 01 19:52:42 crc kubenswrapper[4888]: I1201 19:52:42.947551 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.127955 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerStarted","Data":"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1"} Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.128022 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerStarted","Data":"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5"} Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.325731 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/cinder-scheduler-0" Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.384666 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.432487 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:43 crc kubenswrapper[4888]: I1201 19:52:43.473028 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:44 crc kubenswrapper[4888]: I1201 19:52:44.191382 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerStarted","Data":"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf"} Dec 01 19:52:44 crc kubenswrapper[4888]: I1201 19:52:44.192777 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="cinder-scheduler" containerID="cri-o://2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a" gracePeriod=30 Dec 01 19:52:44 crc kubenswrapper[4888]: I1201 19:52:44.193054 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="probe" containerID="cri-o://dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190" gracePeriod=30 Dec 01 19:52:45 crc kubenswrapper[4888]: I1201 19:52:45.201571 4888 generic.go:334] "Generic (PLEG): container finished" podID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerID="dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190" exitCode=0 Dec 01 19:52:45 crc kubenswrapper[4888]: I1201 19:52:45.201645 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerDied","Data":"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190"} Dec 01 19:52:45 crc kubenswrapper[4888]: I1201 19:52:45.919794 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.019660 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.019771 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.019921 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.020084 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.020135 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z29b\" (UniqueName: \"kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.020249 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom\") pod \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\" (UID: \"65da125b-a2ae-48d0-afcb-bc6e8ce9765c\") " Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.020260 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.021280 4888 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.029353 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.030475 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b" (OuterVolumeSpecName: "kube-api-access-8z29b") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "kube-api-access-8z29b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.032308 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts" (OuterVolumeSpecName: "scripts") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.100992 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.127814 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.127860 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.127895 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z29b\" (UniqueName: \"kubernetes.io/projected/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-kube-api-access-8z29b\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.127910 4888 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.153405 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data" (OuterVolumeSpecName: "config-data") pod "65da125b-a2ae-48d0-afcb-bc6e8ce9765c" (UID: "65da125b-a2ae-48d0-afcb-bc6e8ce9765c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.216087 4888 generic.go:334] "Generic (PLEG): container finished" podID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerID="2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a" exitCode=0 Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.216171 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerDied","Data":"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a"} Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.216231 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"65da125b-a2ae-48d0-afcb-bc6e8ce9765c","Type":"ContainerDied","Data":"6fd10254dab6b7c9b9902c990b6f3ba9832d62c882a12179e1607057955ad95f"} Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.216256 4888 scope.go:117] "RemoveContainer" containerID="dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.216468 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.234855 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65da125b-a2ae-48d0-afcb-bc6e8ce9765c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.238602 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerStarted","Data":"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1"} Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.239407 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.244457 4888 scope.go:117] "RemoveContainer" containerID="2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.265295 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.273748 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.290781 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.103204164 podStartE2EDuration="6.290748219s" podCreationTimestamp="2025-12-01 19:52:40 +0000 UTC" firstStartedPulling="2025-12-01 19:52:41.172405121 +0000 UTC m=+1161.043435035" lastFinishedPulling="2025-12-01 19:52:45.359949176 +0000 UTC m=+1165.230979090" observedRunningTime="2025-12-01 19:52:46.289356969 +0000 UTC m=+1166.160386893" watchObservedRunningTime="2025-12-01 19:52:46.290748219 +0000 UTC m=+1166.161778133" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.313615 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.314347 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="probe" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314375 4888 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="probe" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.314409 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-api" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314419 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-api" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.314446 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="cinder-scheduler" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314456 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="cinder-scheduler" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.314474 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-httpd" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314486 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-httpd" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314920 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-api" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314947 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cff0163-3bf9-4979-90e1-cb5ee39ad044" containerName="neutron-httpd" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314965 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="cinder-scheduler" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.314979 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" containerName="probe" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.316430 4888 scope.go:117] "RemoveContainer" containerID="dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.316958 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190\": container with ID starting with dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190 not found: ID does not exist" containerID="dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.316994 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190"} err="failed to get container status \"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190\": rpc error: code = NotFound desc = could not find container \"dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190\": container with ID starting with dd04af80736259dfdd654d60adc7bf915e52810218bef8a088542df49f220190 not found: ID does not exist" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.317022 4888 scope.go:117] "RemoveContainer" containerID="2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.317232 4888 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a\": container with ID starting with 2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a not found: ID does not exist" containerID="2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.317257 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a"} err="failed to get container status \"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a\": rpc error: code = NotFound desc = could not find container \"2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a\": container with ID starting with 2354e19472414e35d4b66dc47cb56db6f2827d3c3c1dcee816c318dd1a3edd0a not found: ID does not exist" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.317349 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.321409 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.326915 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439079 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4ljm\" (UniqueName: \"kubernetes.io/projected/4d69bebc-c646-4da3-acc5-c7a3106c8100-kube-api-access-p4ljm\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439169 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-scripts\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439234 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439271 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439296 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d69bebc-c646-4da3-acc5-c7a3106c8100-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.439328 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.479992 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65da125b-a2ae-48d0-afcb-bc6e8ce9765c" path="/var/lib/kubelet/pods/65da125b-a2ae-48d0-afcb-bc6e8ce9765c/volumes" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.541651 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-scripts\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.541787 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.541859 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.541889 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d69bebc-c646-4da3-acc5-c7a3106c8100-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.541976 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.542013 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4d69bebc-c646-4da3-acc5-c7a3106c8100-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.542167 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4ljm\" (UniqueName: \"kubernetes.io/projected/4d69bebc-c646-4da3-acc5-c7a3106c8100-kube-api-access-p4ljm\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.547293 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.547516 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-scripts\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.548144 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.561441 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d69bebc-c646-4da3-acc5-c7a3106c8100-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.577124 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4ljm\" (UniqueName: \"kubernetes.io/projected/4d69bebc-c646-4da3-acc5-c7a3106c8100-kube-api-access-p4ljm\") pod \"cinder-scheduler-0\" (UID: \"4d69bebc-c646-4da3-acc5-c7a3106c8100\") " pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.610743 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.646592 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 19:52:46 crc kubenswrapper[4888]: I1201 19:52:46.715961 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:52:46 crc kubenswrapper[4888]: E1201 19:52:46.941102 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice\": RecentStats: unable to find data in memory cache]" Dec 01 19:52:47 crc kubenswrapper[4888]: I1201 19:52:47.196858 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 19:52:47 crc kubenswrapper[4888]: I1201 19:52:47.273847 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4d69bebc-c646-4da3-acc5-c7a3106c8100","Type":"ContainerStarted","Data":"903febc0870dddfebf2e81ed285d0f4c2247e10d1b7ec4c7f61148297dd507fe"} Dec 01 19:52:48 crc kubenswrapper[4888]: I1201 19:52:48.152427 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:52:48 crc kubenswrapper[4888]: I1201 19:52:48.239641 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:48 crc kubenswrapper[4888]: I1201 19:52:48.239895 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="dnsmasq-dns" containerID="cri-o://e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b" gracePeriod=10 Dec 01 19:52:48 crc kubenswrapper[4888]: I1201 19:52:48.306178 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"4d69bebc-c646-4da3-acc5-c7a3106c8100","Type":"ContainerStarted","Data":"a32fe2ccf9968675acf61c194d3509911555930141095c5e3a7e428267818712"} Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.149574 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.222795 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.222856 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.222892 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.223094 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.223163 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.223213 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj2jz\" (UniqueName: \"kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz\") pod \"df3749b5-b4ea-4296-8cde-88fcf8011abe\" (UID: \"df3749b5-b4ea-4296-8cde-88fcf8011abe\") " Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.245476 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz" (OuterVolumeSpecName: "kube-api-access-fj2jz") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). InnerVolumeSpecName "kube-api-access-fj2jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.329396 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj2jz\" (UniqueName: \"kubernetes.io/projected/df3749b5-b4ea-4296-8cde-88fcf8011abe-kube-api-access-fj2jz\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.415563 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.430215 4888 generic.go:334] "Generic (PLEG): container finished" podID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerID="e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b" exitCode=0 Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.430367 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" event={"ID":"df3749b5-b4ea-4296-8cde-88fcf8011abe","Type":"ContainerDied","Data":"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b"} Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.430460 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" event={"ID":"df3749b5-b4ea-4296-8cde-88fcf8011abe","Type":"ContainerDied","Data":"430177977dbe6876fedaddca1e76a1d8be2e725273a162b8beb821a07c118168"} Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.430531 4888 scope.go:117] "RemoveContainer" containerID="e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.430550 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.431579 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.447660 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.473966 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config" (OuterVolumeSpecName: "config") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.494739 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.502413 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "df3749b5-b4ea-4296-8cde-88fcf8011abe" (UID: "df3749b5-b4ea-4296-8cde-88fcf8011abe"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.532449 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.532492 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.532504 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.532515 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df3749b5-b4ea-4296-8cde-88fcf8011abe-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.570337 4888 scope.go:117] "RemoveContainer" containerID="5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.616380 4888 scope.go:117] "RemoveContainer" containerID="e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b" Dec 01 19:52:49 crc kubenswrapper[4888]: E1201 19:52:49.619120 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b\": container with ID starting with e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b not found: ID does not exist" containerID="e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.619151 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b"} err="failed to get container status \"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b\": rpc error: code = NotFound desc = could not find container \"e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b\": container with ID starting with e678f22535ccc4c518882491b956295f8bb01776e48e9ca88ec59ba5180f6f8b not found: ID does not exist" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.619173 4888 scope.go:117] "RemoveContainer" containerID="5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d" Dec 01 19:52:49 crc kubenswrapper[4888]: E1201 19:52:49.621321 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d\": container with ID starting with 5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d not found: ID does not exist" 
containerID="5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.621390 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d"} err="failed to get container status \"5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d\": rpc error: code = NotFound desc = could not find container \"5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d\": container with ID starting with 5ac8594835c6d3b3991d3ea7c4999dda5529c62e9933d1fc040850f4981c6c7d not found: ID does not exist" Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.780959 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:49 crc kubenswrapper[4888]: I1201 19:52:49.791563 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-c88kt"] Dec 01 19:52:50 crc kubenswrapper[4888]: I1201 19:52:50.441299 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"4d69bebc-c646-4da3-acc5-c7a3106c8100","Type":"ContainerStarted","Data":"cca7f75100797f571b78b96ba3fd18d4361b9faa88c11ceea3533f59776dab28"} Dec 01 19:52:50 crc kubenswrapper[4888]: I1201 19:52:50.462420 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" path="/var/lib/kubelet/pods/df3749b5-b4ea-4296-8cde-88fcf8011abe/volumes" Dec 01 19:52:50 crc kubenswrapper[4888]: I1201 19:52:50.481215 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.481171744 podStartE2EDuration="4.481171744s" podCreationTimestamp="2025-12-01 19:52:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:50.478949113 +0000 UTC m=+1170.349979037" watchObservedRunningTime="2025-12-01 19:52:50.481171744 +0000 UTC m=+1170.352201658" Dec 01 19:52:50 crc kubenswrapper[4888]: I1201 19:52:50.640971 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-856c6474d8-q6nhf" Dec 01 19:52:51 crc kubenswrapper[4888]: I1201 19:52:51.122715 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6c8cb9cfb7-n54hp" Dec 01 19:52:51 crc kubenswrapper[4888]: I1201 19:52:51.155911 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-d8bccccd8-fw8bk" Dec 01 19:52:51 crc kubenswrapper[4888]: I1201 19:52:51.261284 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"] Dec 01 19:52:51 crc kubenswrapper[4888]: I1201 19:52:51.456510 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon-log" containerID="cri-o://34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8" gracePeriod=30 Dec 01 19:52:51 crc kubenswrapper[4888]: I1201 19:52:51.457176 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" containerID="cri-o://af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64" gracePeriod=30 Dec 01 19:52:51 crc 
kubenswrapper[4888]: I1201 19:52:51.648038 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.132281 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.297087 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.579745 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75669cfdf8-fmlxq" Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.669005 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.669523 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-745445fc78-xrn2z" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api-log" containerID="cri-o://cc690ce78641323b443479779f18a10a264628d0bc41dd7f95d0884cd4ed98f2" gracePeriod=30 Dec 01 19:52:52 crc kubenswrapper[4888]: I1201 19:52:52.669671 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-745445fc78-xrn2z" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api" containerID="cri-o://7299a937fe97cbc2ce31bd3aa342161fee45f0a040f1898e5d4644bedb568490" gracePeriod=30 Dec 01 19:52:53 crc kubenswrapper[4888]: I1201 19:52:53.478809 4888 generic.go:334] "Generic (PLEG): container finished" podID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerID="cc690ce78641323b443479779f18a10a264628d0bc41dd7f95d0884cd4ed98f2" exitCode=143 Dec 01 19:52:53 crc kubenswrapper[4888]: I1201 19:52:53.478878 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerDied","Data":"cc690ce78641323b443479779f18a10a264628d0bc41dd7f95d0884cd4ed98f2"} Dec 01 19:52:53 crc kubenswrapper[4888]: I1201 19:52:53.977388 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-55f844cf75-c88kt" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.152:5353: i/o timeout" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.607843 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:51986->10.217.0.147:8443: read: connection reset by peer" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.977254 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 01 19:52:54 crc kubenswrapper[4888]: E1201 19:52:54.978358 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="dnsmasq-dns" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.978387 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="dnsmasq-dns" Dec 01 19:52:54 crc kubenswrapper[4888]: E1201 19:52:54.978428 4888 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="init" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.978439 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="init" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.978710 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="df3749b5-b4ea-4296-8cde-88fcf8011abe" containerName="dnsmasq-dns" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.980884 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.983003 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-bv2nd" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.983959 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 01 19:52:54 crc kubenswrapper[4888]: I1201 19:52:54.993969 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.000062 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.046849 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.047005 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.047036 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config-secret\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.047070 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5q2z\" (UniqueName: \"kubernetes.io/projected/a72a3441-507d-44c7-b575-3c3a12fa6821-kube-api-access-f5q2z\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.149611 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.149776 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config\") pod \"openstackclient\" (UID: 
\"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.149817 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config-secret\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.149849 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5q2z\" (UniqueName: \"kubernetes.io/projected/a72a3441-507d-44c7-b575-3c3a12fa6821-kube-api-access-f5q2z\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.151978 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.158734 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.160877 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a72a3441-507d-44c7-b575-3c3a12fa6821-openstack-config-secret\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.174785 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5q2z\" (UniqueName: \"kubernetes.io/projected/a72a3441-507d-44c7-b575-3c3a12fa6821-kube-api-access-f5q2z\") pod \"openstackclient\" (UID: \"a72a3441-507d-44c7-b575-3c3a12fa6821\") " pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.317761 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.534567 4888 generic.go:334] "Generic (PLEG): container finished" podID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerID="af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64" exitCode=0 Dec 01 19:52:55 crc kubenswrapper[4888]: I1201 19:52:55.534899 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerDied","Data":"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"} Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.028222 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.551000 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a72a3441-507d-44c7-b575-3c3a12fa6821","Type":"ContainerStarted","Data":"ba9d9492a50c7b69213dbc0b67d0de4180d06441c881aad899431c90e38c0a36"} Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.555356 4888 generic.go:334] "Generic (PLEG): container finished" podID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerID="7299a937fe97cbc2ce31bd3aa342161fee45f0a040f1898e5d4644bedb568490" exitCode=0 Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.555383 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerDied","Data":"7299a937fe97cbc2ce31bd3aa342161fee45f0a040f1898e5d4644bedb568490"} Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.555399 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-745445fc78-xrn2z" event={"ID":"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc","Type":"ContainerDied","Data":"71c4740962bb7af9aa92dcf0b5c307f603dc53c0167f529f5ef9be5d77ab81ce"} Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.555411 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71c4740962bb7af9aa92dcf0b5c307f603dc53c0167f529f5ef9be5d77ab81ce" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.557546 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.719345 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5khg6\" (UniqueName: \"kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6\") pod \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.719472 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs\") pod \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.719551 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle\") pod \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.719629 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data\") pod \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.719702 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom\") pod \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\" (UID: \"f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc\") " Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.720417 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs" (OuterVolumeSpecName: "logs") pod "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" (UID: "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.745777 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" (UID: "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.761729 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6" (OuterVolumeSpecName: "kube-api-access-5khg6") pod "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" (UID: "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc"). InnerVolumeSpecName "kube-api-access-5khg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.762856 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" (UID: "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.823198 4888 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.823238 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5khg6\" (UniqueName: \"kubernetes.io/projected/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-kube-api-access-5khg6\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.823250 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.823259 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.850490 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data" (OuterVolumeSpecName: "config-data") pod "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" (UID: "f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.909973 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.910932 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="proxy-httpd" containerID="cri-o://2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1" gracePeriod=30 Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.911036 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="sg-core" containerID="cri-o://284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf" gracePeriod=30 Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.911044 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-notification-agent" containerID="cri-o://9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1" gracePeriod=30 Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.911410 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-central-agent" containerID="cri-o://756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5" gracePeriod=30 Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.930247 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": EOF" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.937606 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:56 crc kubenswrapper[4888]: I1201 19:52:56.964938 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 19:52:57 crc kubenswrapper[4888]: E1201 19:52:57.288019 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd5df4c6_7032_4503_b030_f258e37e6c47.slice/crio-2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd5df4c6_7032_4503_b030_f258e37e6c47.slice/crio-conmon-2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice\": RecentStats: unable to find data in memory cache]" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.305996 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5b97969cc9-55p9t"] Dec 01 19:52:57 crc kubenswrapper[4888]: E1201 19:52:57.306573 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api-log" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.306594 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api-log" Dec 01 19:52:57 crc kubenswrapper[4888]: E1201 19:52:57.306628 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.306636 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.306859 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api-log" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.306882 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" containerName="barbican-api" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.308796 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.313802 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.314046 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.315698 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.341449 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5b97969cc9-55p9t"] Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362549 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-log-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362642 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hrn6\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-kube-api-access-9hrn6\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362670 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-run-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362691 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-combined-ca-bundle\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362744 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-etc-swift\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362768 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-public-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362802 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-config-data\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " 
pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.362856 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-internal-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465100 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-etc-swift\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465203 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-public-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465254 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-config-data\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465313 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-internal-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465335 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-log-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465385 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hrn6\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-kube-api-access-9hrn6\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465406 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-run-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.465448 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-combined-ca-bundle\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " 
pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.468749 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-log-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.469537 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ddf684f3-00b0-4564-99ba-e29243df64fb-run-httpd\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.475406 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-combined-ca-bundle\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.480394 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-internal-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.487225 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-config-data\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.504729 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ddf684f3-00b0-4564-99ba-e29243df64fb-public-tls-certs\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.504894 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-etc-swift\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.506020 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hrn6\" (UniqueName: \"kubernetes.io/projected/ddf684f3-00b0-4564-99ba-e29243df64fb-kube-api-access-9hrn6\") pod \"swift-proxy-5b97969cc9-55p9t\" (UID: \"ddf684f3-00b0-4564-99ba-e29243df64fb\") " pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.582053 4888 generic.go:334] "Generic (PLEG): container finished" podID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerID="2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1" exitCode=0 Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.582096 4888 generic.go:334] "Generic (PLEG): container finished" podID="fd5df4c6-7032-4503-b030-f258e37e6c47" 
containerID="284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf" exitCode=2 Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.582227 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-745445fc78-xrn2z" Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.584332 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerDied","Data":"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1"} Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.586335 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerDied","Data":"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf"} Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.627597 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.638509 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-745445fc78-xrn2z"] Dec 01 19:52:57 crc kubenswrapper[4888]: I1201 19:52:57.648847 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.247542 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5b97969cc9-55p9t"] Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.463231 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc" path="/var/lib/kubelet/pods/f17fd26f-b70d-423c-9cc0-4dde8bfdd1dc/volumes" Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.599395 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5b97969cc9-55p9t" event={"ID":"ddf684f3-00b0-4564-99ba-e29243df64fb","Type":"ContainerStarted","Data":"94e6070caa75e3324394bb27683b7dd862308919eb3bf678548a577ef6f56d71"} Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.611696 4888 generic.go:334] "Generic (PLEG): container finished" podID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerID="756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5" exitCode=0 Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.611739 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerDied","Data":"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5"} Dec 01 19:52:58 crc kubenswrapper[4888]: I1201 19:52:58.931076 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.005864 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.005998 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.006024 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.006137 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.006205 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgk2w\" (UniqueName: \"kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.006279 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.006317 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle\") pod \"fd5df4c6-7032-4503-b030-f258e37e6c47\" (UID: \"fd5df4c6-7032-4503-b030-f258e37e6c47\") " Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.022629 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.022938 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.035431 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w" (OuterVolumeSpecName: "kube-api-access-xgk2w") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "kube-api-access-xgk2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.040783 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts" (OuterVolumeSpecName: "scripts") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.086357 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.107944 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.107970 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.107980 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fd5df4c6-7032-4503-b030-f258e37e6c47-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.107990 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.107998 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgk2w\" (UniqueName: \"kubernetes.io/projected/fd5df4c6-7032-4503-b030-f258e37e6c47-kube-api-access-xgk2w\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.124768 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.169578 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data" (OuterVolumeSpecName: "config-data") pod "fd5df4c6-7032-4503-b030-f258e37e6c47" (UID: "fd5df4c6-7032-4503-b030-f258e37e6c47"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.209552 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.209589 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5df4c6-7032-4503-b030-f258e37e6c47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.627662 4888 generic.go:334] "Generic (PLEG): container finished" podID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerID="9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1" exitCode=0 Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.627959 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.627766 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerDied","Data":"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1"} Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.628022 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fd5df4c6-7032-4503-b030-f258e37e6c47","Type":"ContainerDied","Data":"fc98f88831a0d1e29f297ff8536f34222affdd779c111fc5a9c44e2e6f94ff89"} Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.628050 4888 scope.go:117] "RemoveContainer" containerID="2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.632018 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5b97969cc9-55p9t" event={"ID":"ddf684f3-00b0-4564-99ba-e29243df64fb","Type":"ContainerStarted","Data":"de6e437a2d7c95f0b3ff453d63540ee44f3748599198cc3d10905534dce6e4d2"} Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.632058 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5b97969cc9-55p9t" event={"ID":"ddf684f3-00b0-4564-99ba-e29243df64fb","Type":"ContainerStarted","Data":"66ca90dd2decd748b5609b499e2a687d908f45fcfb8fe4ecd33e2acd074e719a"} Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.632357 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.632472 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.659415 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5b97969cc9-55p9t" podStartSLOduration=2.659394043 podStartE2EDuration="2.659394043s" podCreationTimestamp="2025-12-01 19:52:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:52:59.656429701 +0000 UTC m=+1179.527459625" watchObservedRunningTime="2025-12-01 19:52:59.659394043 +0000 UTC m=+1179.530423947" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.686327 4888 scope.go:117] "RemoveContainer" containerID="284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf" Dec 01 19:52:59 crc 
kubenswrapper[4888]: I1201 19:52:59.712243 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.734688 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.766028 4888 scope.go:117] "RemoveContainer" containerID="9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.811056 4888 scope.go:117] "RemoveContainer" containerID="756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.819069 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.825493 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-notification-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.825550 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-notification-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.825715 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="proxy-httpd" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.825749 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="proxy-httpd" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.825796 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-central-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.825807 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-central-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.825847 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="sg-core" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.825856 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="sg-core" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.827969 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="sg-core" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.828064 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-central-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.828108 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="proxy-httpd" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.828156 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" containerName="ceilometer-notification-agent" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.833403 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.838877 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.839214 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.847867 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.869610 4888 scope.go:117] "RemoveContainer" containerID="2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.871595 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1\": container with ID starting with 2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1 not found: ID does not exist" containerID="2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.871673 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1"} err="failed to get container status \"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1\": rpc error: code = NotFound desc = could not find container \"2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1\": container with ID starting with 2c45678cca52e24eb468945e5cc015120fcf8f3e2f1387ad8dea27dbde2d4db1 not found: ID does not exist" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.871707 4888 scope.go:117] "RemoveContainer" containerID="284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.874587 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf\": container with ID starting with 284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf not found: ID does not exist" containerID="284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.874622 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf"} err="failed to get container status \"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf\": rpc error: code = NotFound desc = could not find container \"284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf\": container with ID starting with 284c610ce724e0efccb92cb7c0093009e7e1ad07905d126f1180dd8a130e4adf not found: ID does not exist" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.874641 4888 scope.go:117] "RemoveContainer" containerID="9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.875521 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1\": container with ID starting with 9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1 not found: ID 
does not exist" containerID="9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.875552 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1"} err="failed to get container status \"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1\": rpc error: code = NotFound desc = could not find container \"9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1\": container with ID starting with 9bc76466567cbe74d1ea7deeaf9081099c8a395e61825cc5cf084de2c26b89e1 not found: ID does not exist" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.875567 4888 scope.go:117] "RemoveContainer" containerID="756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5" Dec 01 19:52:59 crc kubenswrapper[4888]: E1201 19:52:59.880106 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5\": container with ID starting with 756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5 not found: ID does not exist" containerID="756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5" Dec 01 19:52:59 crc kubenswrapper[4888]: I1201 19:52:59.880146 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5"} err="failed to get container status \"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5\": rpc error: code = NotFound desc = could not find container \"756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5\": container with ID starting with 756f3769a611a073b93ae7b591b46de2e6aebd04fdeb339e72975ff5e21f26d5 not found: ID does not exist" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.032549 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx5xj\" (UniqueName: \"kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.033962 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.034083 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.034135 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.034210 4888 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.034452 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.034606 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.136683 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.137716 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.137815 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx5xj\" (UniqueName: \"kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.138073 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.138141 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.138175 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.138241 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 
19:53:00.141211 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.147020 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.159464 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.159520 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.160085 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.160235 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.170338 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx5xj\" (UniqueName: \"kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj\") pod \"ceilometer-0\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.461582 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:00 crc kubenswrapper[4888]: I1201 19:53:00.465697 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5df4c6-7032-4503-b030-f258e37e6c47" path="/var/lib/kubelet/pods/fd5df4c6-7032-4503-b030-f258e37e6c47/volumes" Dec 01 19:53:01 crc kubenswrapper[4888]: I1201 19:53:01.035485 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:01 crc kubenswrapper[4888]: I1201 19:53:01.663498 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerStarted","Data":"cc72f2adb6cfbdaa8f37982d2183dde31f3c00f3995ba52897fe73e372e198ca"} Dec 01 19:53:02 crc kubenswrapper[4888]: I1201 19:53:02.692831 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerStarted","Data":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} Dec 01 19:53:03 crc kubenswrapper[4888]: I1201 19:53:03.711939 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerStarted","Data":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} Dec 01 19:53:04 crc kubenswrapper[4888]: I1201 19:53:04.508364 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused" Dec 01 19:53:06 crc kubenswrapper[4888]: I1201 19:53:06.230572 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:07 crc kubenswrapper[4888]: E1201 19:53:07.623308 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice\": RecentStats: unable to find data in memory cache]" Dec 01 19:53:07 crc kubenswrapper[4888]: I1201 19:53:07.656146 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:53:07 crc kubenswrapper[4888]: I1201 19:53:07.661656 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5b97969cc9-55p9t" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.406785 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495552 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495766 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495867 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495905 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495931 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.495979 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lhxz\" (UniqueName: \"kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.496026 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs\") pod \"aa098f68-2731-4769-a237-6b568758e588\" (UID: \"aa098f68-2731-4769-a237-6b568758e588\") " Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.496450 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.496651 4888 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa098f68-2731-4769-a237-6b568758e588-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.498697 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs" (OuterVolumeSpecName: "logs") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.505340 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.505403 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts" (OuterVolumeSpecName: "scripts") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.506383 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz" (OuterVolumeSpecName: "kube-api-access-7lhxz") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "kube-api-access-7lhxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.530588 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.560718 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data" (OuterVolumeSpecName: "config-data") pod "aa098f68-2731-4769-a237-6b568758e588" (UID: "aa098f68-2731-4769-a237-6b568758e588"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599842 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599886 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599906 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599918 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lhxz\" (UniqueName: \"kubernetes.io/projected/aa098f68-2731-4769-a237-6b568758e588-kube-api-access-7lhxz\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599935 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa098f68-2731-4769-a237-6b568758e588-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.599948 4888 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa098f68-2731-4769-a237-6b568758e588-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.816307 4888 generic.go:334] "Generic (PLEG): container finished" podID="aa098f68-2731-4769-a237-6b568758e588" containerID="ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce" exitCode=137 Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.816494 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.818052 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerDied","Data":"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce"} Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.818118 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa098f68-2731-4769-a237-6b568758e588","Type":"ContainerDied","Data":"cf0222710c6a7bf713a630599720626ce9d5839395658ab615d3a97cb234faaf"} Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.818141 4888 scope.go:117] "RemoveContainer" containerID="ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.823728 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerStarted","Data":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.825448 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a72a3441-507d-44c7-b575-3c3a12fa6821","Type":"ContainerStarted","Data":"c75b767821ba621134633b4ba7a40c0cc75a19b96769f0970e63e6f446a3af7f"} Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.878980 4888 scope.go:117] "RemoveContainer" containerID="928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.923925 4888 scope.go:117] "RemoveContainer" containerID="ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce" Dec 01 19:53:10 crc kubenswrapper[4888]: E1201 19:53:10.924917 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce\": container with ID starting with ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce not found: ID does not exist" containerID="ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.925085 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce"} err="failed to get container status \"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce\": rpc error: code = NotFound desc = could not find container \"ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce\": container with ID starting with ca4a20134f5a8bb0dfaac6c79907775311dd0a5b974c392c7ac074b84eb0d9ce not found: ID does not exist" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.925309 4888 scope.go:117] "RemoveContainer" containerID="928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197" Dec 01 19:53:10 crc kubenswrapper[4888]: E1201 19:53:10.926729 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197\": container with ID starting with 928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197 not found: ID does not exist" containerID="928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.926872 4888 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197"} err="failed to get container status \"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197\": rpc error: code = NotFound desc = could not find container \"928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197\": container with ID starting with 928752f4a34fbefb7479056dfdcd0342a0af98ee06bc2c359c61f8743d6f8197 not found: ID does not exist" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.932505 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.977243451 podStartE2EDuration="16.932412877s" podCreationTimestamp="2025-12-01 19:52:54 +0000 UTC" firstStartedPulling="2025-12-01 19:52:56.041232358 +0000 UTC m=+1175.912262272" lastFinishedPulling="2025-12-01 19:53:09.996401784 +0000 UTC m=+1189.867431698" observedRunningTime="2025-12-01 19:53:10.851549312 +0000 UTC m=+1190.722579236" watchObservedRunningTime="2025-12-01 19:53:10.932412877 +0000 UTC m=+1190.803442791" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.954341 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.968253 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.985148 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:53:10 crc kubenswrapper[4888]: E1201 19:53:10.985898 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api-log" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.985923 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api-log" Dec 01 19:53:10 crc kubenswrapper[4888]: E1201 19:53:10.985956 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.985963 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.986155 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api-log" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.986204 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa098f68-2731-4769-a237-6b568758e588" containerName="cinder-api" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.987424 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.992810 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.992917 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 01 19:53:10 crc kubenswrapper[4888]: I1201 19:53:10.992947 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.001903 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.117836 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-logs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.117897 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.118083 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jvf8\" (UniqueName: \"kubernetes.io/projected/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-kube-api-access-9jvf8\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.118417 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.118706 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.118930 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.118991 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.119122 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.119295 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-scripts\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.221774 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jvf8\" (UniqueName: \"kubernetes.io/projected/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-kube-api-access-9jvf8\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.221877 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.221959 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222027 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222056 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222109 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222155 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-scripts\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222204 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-logs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222259 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222931 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-logs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.222199 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.227940 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-public-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.228406 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data-custom\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.228456 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-scripts\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.229837 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.231258 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.239119 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-config-data\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.247042 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jvf8\" (UniqueName: \"kubernetes.io/projected/6b0b6a93-78d0-43c8-b6fb-059da98cf4bd-kube-api-access-9jvf8\") pod \"cinder-api-0\" (UID: \"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd\") " pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.343193 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.662878 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 19:53:11 crc kubenswrapper[4888]: I1201 19:53:11.850214 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd","Type":"ContainerStarted","Data":"74914329c787e484c7a3dceec6ae94e4fa4b511a5c5aaa84348630706b894e08"} Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.477266 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa098f68-2731-4769-a237-6b568758e588" path="/var/lib/kubelet/pods/aa098f68-2731-4769-a237-6b568758e588/volumes" Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872105 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerStarted","Data":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872446 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="proxy-httpd" containerID="cri-o://0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" gracePeriod=30 Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872504 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872347 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-central-agent" containerID="cri-o://c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" gracePeriod=30 Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872663 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-notification-agent" containerID="cri-o://7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" gracePeriod=30 Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.872742 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="sg-core" containerID="cri-o://d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" gracePeriod=30 Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.876550 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd","Type":"ContainerStarted","Data":"c31c085d1463391ce43559bd07ec822d6e2f7c77744c828c090432d84bce19e5"} Dec 01 19:53:12 crc kubenswrapper[4888]: I1201 19:53:12.902490 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.016177321 podStartE2EDuration="13.902463935s" podCreationTimestamp="2025-12-01 19:52:59 +0000 UTC" firstStartedPulling="2025-12-01 19:53:01.047436702 +0000 UTC m=+1180.918466616" lastFinishedPulling="2025-12-01 19:53:11.933723306 +0000 UTC m=+1191.804753230" observedRunningTime="2025-12-01 19:53:12.900815819 +0000 UTC m=+1192.771845743" watchObservedRunningTime="2025-12-01 19:53:12.902463935 +0000 UTC m=+1192.773493849" Dec 01 19:53:13 crc 
kubenswrapper[4888]: I1201 19:53:13.837613 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884161 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884446 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx5xj\" (UniqueName: \"kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884529 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884597 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884672 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884758 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.884822 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data\") pod \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\" (UID: \"ab13b368-3838-42a2-ade4-f7ee97ed6ab3\") " Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.891999 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.894398 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.898029 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts" (OuterVolumeSpecName: "scripts") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.898282 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj" (OuterVolumeSpecName: "kube-api-access-fx5xj") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "kube-api-access-fx5xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914785 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" exitCode=0 Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914823 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" exitCode=2 Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914835 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" exitCode=0 Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914845 4888 generic.go:334] "Generic (PLEG): container finished" podID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" exitCode=0 Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914913 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerDied","Data":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914924 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914953 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerDied","Data":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914966 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerDied","Data":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914976 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerDied","Data":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.914986 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab13b368-3838-42a2-ade4-f7ee97ed6ab3","Type":"ContainerDied","Data":"cc72f2adb6cfbdaa8f37982d2183dde31f3c00f3995ba52897fe73e372e198ca"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.915007 4888 scope.go:117] "RemoveContainer" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.937908 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6b0b6a93-78d0-43c8-b6fb-059da98cf4bd","Type":"ContainerStarted","Data":"3a8e979933fb07b5fc062ec4e06ce12a14c42555117d4f019ddf424bc5000b80"} Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.938721 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.945053 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.981732 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.981698837 podStartE2EDuration="3.981698837s" podCreationTimestamp="2025-12-01 19:53:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:13.959132073 +0000 UTC m=+1193.830161987" watchObservedRunningTime="2025-12-01 19:53:13.981698837 +0000 UTC m=+1193.852728751" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.987702 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx5xj\" (UniqueName: \"kubernetes.io/projected/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-kube-api-access-fx5xj\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.987736 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.987747 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.987755 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.987764 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:13 crc kubenswrapper[4888]: I1201 19:53:13.998777 4888 scope.go:117] "RemoveContainer" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.101973 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.104654 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data" (OuterVolumeSpecName: "config-data") pod "ab13b368-3838-42a2-ade4-f7ee97ed6ab3" (UID: "ab13b368-3838-42a2-ade4-f7ee97ed6ab3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.195558 4888 scope.go:117] "RemoveContainer" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.196847 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.196917 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab13b368-3838-42a2-ade4-f7ee97ed6ab3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.216521 4888 scope.go:117] "RemoveContainer" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.243321 4888 scope.go:117] "RemoveContainer" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.243987 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": container with ID starting with 0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8 not found: ID does not exist" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244055 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} err="failed to get container status \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": rpc error: code = NotFound desc = could not find container \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": container with ID starting with 0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8 not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244102 4888 scope.go:117] "RemoveContainer" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.244551 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": container with ID starting with d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd not found: ID does not exist" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244595 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} err="failed to get container status \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": rpc error: code = NotFound desc = could not find container \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": container with ID starting with d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244632 4888 scope.go:117] "RemoveContainer" 
containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.244897 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": container with ID starting with 7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e not found: ID does not exist" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244932 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} err="failed to get container status \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": rpc error: code = NotFound desc = could not find container \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": container with ID starting with 7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.244949 4888 scope.go:117] "RemoveContainer" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.245270 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": container with ID starting with c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec not found: ID does not exist" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.245295 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} err="failed to get container status \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": rpc error: code = NotFound desc = could not find container \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": container with ID starting with c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.245309 4888 scope.go:117] "RemoveContainer" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.251867 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} err="failed to get container status \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": rpc error: code = NotFound desc = could not find container \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": container with ID starting with 0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8 not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.251937 4888 scope.go:117] "RemoveContainer" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.255935 4888 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} err="failed to get container status \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": rpc error: code = NotFound desc = could not find container \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": container with ID starting with d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.255981 4888 scope.go:117] "RemoveContainer" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.256765 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} err="failed to get container status \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": rpc error: code = NotFound desc = could not find container \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": container with ID starting with 7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.256845 4888 scope.go:117] "RemoveContainer" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.257464 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} err="failed to get container status \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": rpc error: code = NotFound desc = could not find container \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": container with ID starting with c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.257502 4888 scope.go:117] "RemoveContainer" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.257912 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} err="failed to get container status \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": rpc error: code = NotFound desc = could not find container \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": container with ID starting with 0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8 not found: ID does not exist" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.257941 4888 scope.go:117] "RemoveContainer" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd" Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.258290 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} err="failed to get container status \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": rpc error: code = NotFound desc = could not find container \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": container with ID starting with d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd not found: ID does not exist" Dec 
01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.258334 4888 scope.go:117] "RemoveContainer" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.258851 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} err="failed to get container status \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": rpc error: code = NotFound desc = could not find container \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": container with ID starting with 7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.258881 4888 scope.go:117] "RemoveContainer" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.259278 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} err="failed to get container status \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": rpc error: code = NotFound desc = could not find container \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": container with ID starting with c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.259321 4888 scope.go:117] "RemoveContainer" containerID="0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.259816 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8"} err="failed to get container status \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": rpc error: code = NotFound desc = could not find container \"0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8\": container with ID starting with 0ace7a13488df0f722c1e41ddd6b053c3b455bb152fe9dc0e344f275eea9b6b8 not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.259870 4888 scope.go:117] "RemoveContainer" containerID="d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.260167 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd"} err="failed to get container status \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": rpc error: code = NotFound desc = could not find container \"d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd\": container with ID starting with d2afab35f2984a7c407b9742d1b9569df72e6400ec526a693695d388ba09ffbd not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.260215 4888 scope.go:117] "RemoveContainer" containerID="7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.260562 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e"} err="failed to get container status \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": rpc error: code = NotFound desc = could not find container \"7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e\": container with ID starting with 7401acc225f463340cb0e596d2c58da44f597e9b282ce1e4970f7cd923f2dc0e not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.260595 4888 scope.go:117] "RemoveContainer" containerID="c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.260885 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec"} err="failed to get container status \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": rpc error: code = NotFound desc = could not find container \"c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec\": container with ID starting with c721f19555a28d2bf221f5c63d9985dedba299575c0565bf2fd61174dedebcec not found: ID does not exist"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.267085 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.295497 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.309229 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.309799 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-notification-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.309826 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-notification-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.309853 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="proxy-httpd"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.309861 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="proxy-httpd"
Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.309872 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-central-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.309879 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-central-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: E1201 19:53:14.309886 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="sg-core"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.309893 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="sg-core"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.310114 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="proxy-httpd"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.310134 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-notification-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.310156 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="sg-core"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.310172 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" containerName="ceilometer-central-agent"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.312170 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.316774 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.317073 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.317567 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.403485 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.403647 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.403928 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.404037 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.404070 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.404251 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llr58\" (UniqueName: \"kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.404307 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.466682 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab13b368-3838-42a2-ade4-f7ee97ed6ab3" path="/var/lib/kubelet/pods/ab13b368-3838-42a2-ade4-f7ee97ed6ab3/volumes"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.506794 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.506911 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507058 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507120 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507146 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507251 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llr58\" (UniqueName: \"kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507286 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507635 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.507772 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.508452 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-856c6474d8-q6nhf" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.147:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.147:8443: connect: connection refused"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.508574 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-856c6474d8-q6nhf"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.512320 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.512476 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.513519 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.519997 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.529855 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llr58\" (UniqueName: \"kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58\") pod \"ceilometer-0\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " pod="openstack/ceilometer-0"
Dec 01 19:53:14 crc kubenswrapper[4888]: I1201 19:53:14.632346 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 19:53:15 crc kubenswrapper[4888]: I1201 19:53:15.205825 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:15 crc kubenswrapper[4888]: W1201 19:53:15.212576 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13958130_9b7e_42c3_8c2d_d2486ac32798.slice/crio-6feb09dcbcdbc8f88d187b5f34ac78cc460d429e10cbde198c28a9fa64f84ad2 WatchSource:0}: Error finding container 6feb09dcbcdbc8f88d187b5f34ac78cc460d429e10cbde198c28a9fa64f84ad2: Status 404 returned error can't find the container with id 6feb09dcbcdbc8f88d187b5f34ac78cc460d429e10cbde198c28a9fa64f84ad2
Dec 01 19:53:15 crc kubenswrapper[4888]: I1201 19:53:15.969132 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerStarted","Data":"bd81f44bf4861352ba63859c0d734505ea286bf5d0b3f84d4dcb8a74f1ed8b08"}
Dec 01 19:53:15 crc kubenswrapper[4888]: I1201 19:53:15.969608 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerStarted","Data":"6feb09dcbcdbc8f88d187b5f34ac78cc460d429e10cbde198c28a9fa64f84ad2"}
Dec 01 19:53:16 crc kubenswrapper[4888]: I1201 19:53:16.981294 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerStarted","Data":"883038450cbd09e6679340749573c428098091a5102e84524f748bd7e5eb8405"}
Dec 01 19:53:17 crc kubenswrapper[4888]: E1201 19:53:17.924923 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dcfdef_7ab0_4e79_b56e_3d3536a60538.slice\": RecentStats: unable to find data in memory cache]"
Dec 01 19:53:17 crc kubenswrapper[4888]: I1201 19:53:17.996320 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerStarted","Data":"22814d8bcb82819e027a42b2658a2ab6a63745732c11776755d91019f66affec"}
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.711459 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-q4zxq"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.713419 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.729327 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-q4zxq"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.810359 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s87sz\" (UniqueName: \"kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.811156 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.823234 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-nwdjg"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.825056 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.851423 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nwdjg"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.920822 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq8nz\" (UniqueName: \"kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.921201 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s87sz\" (UniqueName: \"kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.921417 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.921512 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.922477 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.927901 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-9cnzd"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.930508 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.941344 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9be2-account-create-update-4dzgs"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.942865 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.949440 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.953019 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9cnzd"]
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.964043 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s87sz\" (UniqueName: \"kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz\") pod \"nova-api-db-create-q4zxq\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:18 crc kubenswrapper[4888]: I1201 19:53:18.966685 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9be2-account-create-update-4dzgs"]
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.023989 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq8nz\" (UniqueName: \"kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.024071 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4g4\" (UniqueName: \"kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.024138 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.024177 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.024253 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwcjd\" (UniqueName: \"kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.024337 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.025410 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.054601 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.063936 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq8nz\" (UniqueName: \"kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz\") pod \"nova-cell0-db-create-nwdjg\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.127787 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0ad5-account-create-update-jmqp8"]
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.127884 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4g4\" (UniqueName: \"kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.128660 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.128802 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.129054 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwcjd\" (UniqueName: \"kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.130994 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.131121 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.131719 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.136627 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.148898 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ad5-account-create-update-jmqp8"]
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.170860 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.170880 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4g4\" (UniqueName: \"kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4\") pod \"nova-cell1-db-create-9cnzd\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") " pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.184416 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwcjd\" (UniqueName: \"kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd\") pod \"nova-api-9be2-account-create-update-4dzgs\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") " pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.232631 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.232757 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hlbp\" (UniqueName: \"kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.331942 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.337822 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.337938 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hlbp\" (UniqueName: \"kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.340691 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.355027 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.367628 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hlbp\" (UniqueName: \"kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp\") pod \"nova-cell0-0ad5-account-create-update-jmqp8\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.385080 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9ee4-account-create-update-5rm9g"]
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.418458 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9ee4-account-create-update-5rm9g"]
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.418675 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.423752 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.553529 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.553893 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqhgh\" (UniqueName: \"kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.594899 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.659643 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.660027 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqhgh\" (UniqueName: \"kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.661633 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.697761 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqhgh\" (UniqueName: \"kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh\") pod \"nova-cell1-9ee4-account-create-update-5rm9g\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.701857 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-q4zxq"]
Dec 01 19:53:19 crc kubenswrapper[4888]: W1201 19:53:19.717407 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5908ab4f_64fe_466c_b085_0c70ca92a868.slice/crio-bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98 WatchSource:0}: Error finding container bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98: Status 404 returned error can't find the container with id bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.776764 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:19 crc kubenswrapper[4888]: I1201 19:53:19.857082 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nwdjg"]
Dec 01 19:53:19 crc kubenswrapper[4888]: W1201 19:53:19.899255 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5 WatchSource:0}: Error finding container 006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5: Status 404 returned error can't find the container with id 006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.060558 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerStarted","Data":"3c6faed749bfecc752e9ababeb0f486dba03f803ea0432979872ad1ac8cb411e"}
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.062498 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.073820 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nwdjg" event={"ID":"d072d085-14a2-4137-a9a1-29882ab4fe55","Type":"ContainerStarted","Data":"006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5"}
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.085548 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9cnzd"]
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.092102 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q4zxq" event={"ID":"5908ab4f-64fe-466c-b085-0c70ca92a868","Type":"ContainerStarted","Data":"dac2766fc9b1fc690e4c31dc4a3ce48f1727e9eccfe592530996b697542f8947"}
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.092174 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q4zxq" event={"ID":"5908ab4f-64fe-466c-b085-0c70ca92a868","Type":"ContainerStarted","Data":"bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98"}
Dec 01 19:53:20 crc kubenswrapper[4888]: W1201 19:53:20.102436 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91704c86_8e91_4bf0_8ee0_def68c8c321a.slice/crio-25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488 WatchSource:0}: Error finding container 25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488: Status 404 returned error can't find the container with id 25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.105057 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9be2-account-create-update-4dzgs"]
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.109860 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.848373187 podStartE2EDuration="6.109836245s" podCreationTimestamp="2025-12-01 19:53:14 +0000 UTC" firstStartedPulling="2025-12-01 19:53:15.218456264 +0000 UTC m=+1195.089486178" lastFinishedPulling="2025-12-01 19:53:19.479919322 +0000 UTC m=+1199.350949236" observedRunningTime="2025-12-01 19:53:20.08940909 +0000 UTC m=+1199.960439024" watchObservedRunningTime="2025-12-01 19:53:20.109836245 +0000 UTC m=+1199.980866159"
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.135503 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-q4zxq" podStartSLOduration=2.135480184 podStartE2EDuration="2.135480184s" podCreationTimestamp="2025-12-01 19:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:20.119519253 +0000 UTC m=+1199.990549167" watchObservedRunningTime="2025-12-01 19:53:20.135480184 +0000 UTC m=+1200.006510098"
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.262035 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ad5-account-create-update-jmqp8"]
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.542824 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9ee4-account-create-update-5rm9g"]
Dec 01 19:53:20 crc kubenswrapper[4888]: I1201 19:53:20.544887 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.136651 4888 generic.go:334] "Generic (PLEG): container finished" podID="d072d085-14a2-4137-a9a1-29882ab4fe55" containerID="b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.137656 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nwdjg" event={"ID":"d072d085-14a2-4137-a9a1-29882ab4fe55","Type":"ContainerDied","Data":"b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.161328 4888 generic.go:334] "Generic (PLEG): container finished" podID="91704c86-8e91-4bf0-8ee0-def68c8c321a" containerID="d789933eb23391062144ecccdd3b256dd69de3c76157aab36af8cde91f873f9e" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.161424 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9be2-account-create-update-4dzgs" event={"ID":"91704c86-8e91-4bf0-8ee0-def68c8c321a","Type":"ContainerDied","Data":"d789933eb23391062144ecccdd3b256dd69de3c76157aab36af8cde91f873f9e"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.161455 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9be2-account-create-update-4dzgs" event={"ID":"91704c86-8e91-4bf0-8ee0-def68c8c321a","Type":"ContainerStarted","Data":"25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.175370 4888 generic.go:334] "Generic (PLEG): container finished" podID="5908ab4f-64fe-466c-b085-0c70ca92a868" containerID="dac2766fc9b1fc690e4c31dc4a3ce48f1727e9eccfe592530996b697542f8947" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.175473 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q4zxq" event={"ID":"5908ab4f-64fe-466c-b085-0c70ca92a868","Type":"ContainerDied","Data":"dac2766fc9b1fc690e4c31dc4a3ce48f1727e9eccfe592530996b697542f8947"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.198671 4888 generic.go:334] "Generic (PLEG): container finished" podID="e53eb8db-1d23-4aac-85d2-36f1008834bb" containerID="eb04160056694df50ee360c0cfa6f5f561a83bd3c2dc58e6632472dc910706d8" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.198867 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g" event={"ID":"e53eb8db-1d23-4aac-85d2-36f1008834bb","Type":"ContainerDied","Data":"eb04160056694df50ee360c0cfa6f5f561a83bd3c2dc58e6632472dc910706d8"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.198911 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g" event={"ID":"e53eb8db-1d23-4aac-85d2-36f1008834bb","Type":"ContainerStarted","Data":"ed7b942a048de0889e45b34e1efb8ee68476ba9d42ebf1ebadcb5fb2e38cdd37"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.212313 4888 generic.go:334] "Generic (PLEG): container finished" podID="7528663a-6635-4fe0-8840-d0d0601799ce" containerID="5688aa7a2c95c898b381703f839ca94238e217d5a8e7f1f66c15de4c4436e353" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.212539 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8" event={"ID":"7528663a-6635-4fe0-8840-d0d0601799ce","Type":"ContainerDied","Data":"5688aa7a2c95c898b381703f839ca94238e217d5a8e7f1f66c15de4c4436e353"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.218567 4888 generic.go:334] "Generic (PLEG): container finished" podID="5f05189a-2b40-4e96-bc85-1b23401fc9d9" containerID="c5c4222a640131fbbbb14d210300042be26d7b4c86b178394d6c3de5dc81f73b" exitCode=0
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.219312 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8" event={"ID":"7528663a-6635-4fe0-8840-d0d0601799ce","Type":"ContainerStarted","Data":"002e339c837bd7255ea4874d7c460a02879668789e287fb3818d7bca9f805770"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.219355 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9cnzd" event={"ID":"5f05189a-2b40-4e96-bc85-1b23401fc9d9","Type":"ContainerDied","Data":"c5c4222a640131fbbbb14d210300042be26d7b4c86b178394d6c3de5dc81f73b"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.219371 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9cnzd" event={"ID":"5f05189a-2b40-4e96-bc85-1b23401fc9d9","Type":"ContainerStarted","Data":"b18664f3393ab9ca553a3e7932710eab4586f4da3a7583f8e4550fde48100318"}
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.909123 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-856c6474d8-q6nhf"
Dec 01 19:53:21 crc kubenswrapper[4888]: I1201 19:53:21.987451 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071165 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071418 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071529 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzzh8\" (UniqueName: \"kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071562 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071634 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071689 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.071783 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs\") pod \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\" (UID: \"f5a90a13-4ff1-4a48-8ced-df9b4765db1b\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.072689 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs" (OuterVolumeSpecName: "logs") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.079575 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.081350 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8" (OuterVolumeSpecName: "kube-api-access-xzzh8") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "kube-api-access-xzzh8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.127506 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data" (OuterVolumeSpecName: "config-data") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.130625 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.139050 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts" (OuterVolumeSpecName: "scripts") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.150614 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "f5a90a13-4ff1-4a48-8ced-df9b4765db1b" (UID: "f5a90a13-4ff1-4a48-8ced-df9b4765db1b"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.174893 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.174948 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzzh8\" (UniqueName: \"kubernetes.io/projected/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-kube-api-access-xzzh8\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.174964 4888 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.174982 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-logs\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.174997 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.175010 4888 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-horizon-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.175021 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f5a90a13-4ff1-4a48-8ced-df9b4765db1b-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.231838 4888 generic.go:334] "Generic (PLEG): container finished" podID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerID="34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8" exitCode=137
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.231900 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-856c6474d8-q6nhf"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.231904 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerDied","Data":"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"}
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.231971 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-856c6474d8-q6nhf" event={"ID":"f5a90a13-4ff1-4a48-8ced-df9b4765db1b","Type":"ContainerDied","Data":"f5cafde9ab32748bb28aacfc8a89d8726a418eced2306e2e75a8bf901e49ca45"}
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.231994 4888 scope.go:117] "RemoveContainer" containerID="af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.289250 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"]
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.295881 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-856c6474d8-q6nhf"]
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.464606 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" path="/var/lib/kubelet/pods/f5a90a13-4ff1-4a48-8ced-df9b4765db1b/volumes"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.542064 4888 scope.go:117] "RemoveContainer" containerID="34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.567793 4888 scope.go:117] "RemoveContainer" containerID="af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"
Dec 01 19:53:22 crc kubenswrapper[4888]: E1201 19:53:22.568531 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64\": container with ID starting with af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64 not found: ID does not exist" containerID="af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.568558 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64"} err="failed to get container status \"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64\": rpc error: code = NotFound desc = could not find container \"af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64\": container with ID starting with af984627e307b4e49f8e194a8905d62f74785034aab997cb8e0be7b88d00aa64 not found: ID does not exist"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.568578 4888 scope.go:117] "RemoveContainer" containerID="34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"
Dec 01 19:53:22 crc kubenswrapper[4888]: E1201 19:53:22.570550 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8\": container with ID starting with 34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8 not found: ID does not exist" containerID="34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.570579 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8"} err="failed to get container status \"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8\": rpc error: code = NotFound desc = could not find container \"34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8\": container with ID starting with 34a59abd2e41f7b7620b6a1585676a3a77b3c7d31ad59831f04219ad273a9cf8 not found: ID does not exist"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.765108 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9be2-account-create-update-4dzgs"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.897305 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts\") pod \"91704c86-8e91-4bf0-8ee0-def68c8c321a\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.897678 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwcjd\" (UniqueName: \"kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd\") pod \"91704c86-8e91-4bf0-8ee0-def68c8c321a\" (UID: \"91704c86-8e91-4bf0-8ee0-def68c8c321a\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.898607 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91704c86-8e91-4bf0-8ee0-def68c8c321a" (UID: "91704c86-8e91-4bf0-8ee0-def68c8c321a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.904020 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9cnzd"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.906418 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd" (OuterVolumeSpecName: "kube-api-access-wwcjd") pod "91704c86-8e91-4bf0-8ee0-def68c8c321a" (UID: "91704c86-8e91-4bf0-8ee0-def68c8c321a"). InnerVolumeSpecName "kube-api-access-wwcjd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.920593 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.938949 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.964455 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q4zxq"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.992178 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nwdjg"
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.998969 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts\") pod \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.999262 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd4g4\" (UniqueName: \"kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4\") pod \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\" (UID: \"5f05189a-2b40-4e96-bc85-1b23401fc9d9\") "
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.999750 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwcjd\" (UniqueName: \"kubernetes.io/projected/91704c86-8e91-4bf0-8ee0-def68c8c321a-kube-api-access-wwcjd\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:22 crc kubenswrapper[4888]: I1201 19:53:22.999771 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91704c86-8e91-4bf0-8ee0-def68c8c321a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.001013 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f05189a-2b40-4e96-bc85-1b23401fc9d9" (UID: "5f05189a-2b40-4e96-bc85-1b23401fc9d9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.003958 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4" (OuterVolumeSpecName: "kube-api-access-sd4g4") pod "5f05189a-2b40-4e96-bc85-1b23401fc9d9" (UID: "5f05189a-2b40-4e96-bc85-1b23401fc9d9"). InnerVolumeSpecName "kube-api-access-sd4g4". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101340 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq8nz\" (UniqueName: \"kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz\") pod \"d072d085-14a2-4137-a9a1-29882ab4fe55\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101471 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts\") pod \"7528663a-6635-4fe0-8840-d0d0601799ce\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101504 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqhgh\" (UniqueName: \"kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh\") pod \"e53eb8db-1d23-4aac-85d2-36f1008834bb\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101570 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts\") pod \"d072d085-14a2-4137-a9a1-29882ab4fe55\" (UID: \"d072d085-14a2-4137-a9a1-29882ab4fe55\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101594 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hlbp\" (UniqueName: \"kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp\") pod \"7528663a-6635-4fe0-8840-d0d0601799ce\" (UID: \"7528663a-6635-4fe0-8840-d0d0601799ce\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101649 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts\") pod \"5908ab4f-64fe-466c-b085-0c70ca92a868\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101697 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts\") pod \"e53eb8db-1d23-4aac-85d2-36f1008834bb\" (UID: \"e53eb8db-1d23-4aac-85d2-36f1008834bb\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.101789 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s87sz\" (UniqueName: \"kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz\") pod \"5908ab4f-64fe-466c-b085-0c70ca92a868\" (UID: \"5908ab4f-64fe-466c-b085-0c70ca92a868\") " Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.102407 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d072d085-14a2-4137-a9a1-29882ab4fe55" (UID: "d072d085-14a2-4137-a9a1-29882ab4fe55"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.102599 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d072d085-14a2-4137-a9a1-29882ab4fe55-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.102628 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd4g4\" (UniqueName: \"kubernetes.io/projected/5f05189a-2b40-4e96-bc85-1b23401fc9d9-kube-api-access-sd4g4\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.102643 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f05189a-2b40-4e96-bc85-1b23401fc9d9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.105269 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7528663a-6635-4fe0-8840-d0d0601799ce" (UID: "7528663a-6635-4fe0-8840-d0d0601799ce"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.106578 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp" (OuterVolumeSpecName: "kube-api-access-8hlbp") pod "7528663a-6635-4fe0-8840-d0d0601799ce" (UID: "7528663a-6635-4fe0-8840-d0d0601799ce"). InnerVolumeSpecName "kube-api-access-8hlbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.106872 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5908ab4f-64fe-466c-b085-0c70ca92a868" (UID: "5908ab4f-64fe-466c-b085-0c70ca92a868"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.107163 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e53eb8db-1d23-4aac-85d2-36f1008834bb" (UID: "e53eb8db-1d23-4aac-85d2-36f1008834bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.108473 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz" (OuterVolumeSpecName: "kube-api-access-bq8nz") pod "d072d085-14a2-4137-a9a1-29882ab4fe55" (UID: "d072d085-14a2-4137-a9a1-29882ab4fe55"). InnerVolumeSpecName "kube-api-access-bq8nz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.109150 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh" (OuterVolumeSpecName: "kube-api-access-tqhgh") pod "e53eb8db-1d23-4aac-85d2-36f1008834bb" (UID: "e53eb8db-1d23-4aac-85d2-36f1008834bb"). 
InnerVolumeSpecName "kube-api-access-tqhgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.109739 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz" (OuterVolumeSpecName: "kube-api-access-s87sz") pod "5908ab4f-64fe-466c-b085-0c70ca92a868" (UID: "5908ab4f-64fe-466c-b085-0c70ca92a868"). InnerVolumeSpecName "kube-api-access-s87sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204870 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq8nz\" (UniqueName: \"kubernetes.io/projected/d072d085-14a2-4137-a9a1-29882ab4fe55-kube-api-access-bq8nz\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204914 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7528663a-6635-4fe0-8840-d0d0601799ce-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204927 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqhgh\" (UniqueName: \"kubernetes.io/projected/e53eb8db-1d23-4aac-85d2-36f1008834bb-kube-api-access-tqhgh\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204940 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hlbp\" (UniqueName: \"kubernetes.io/projected/7528663a-6635-4fe0-8840-d0d0601799ce-kube-api-access-8hlbp\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204954 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5908ab4f-64fe-466c-b085-0c70ca92a868-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204967 4888 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e53eb8db-1d23-4aac-85d2-36f1008834bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.204981 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s87sz\" (UniqueName: \"kubernetes.io/projected/5908ab4f-64fe-466c-b085-0c70ca92a868-kube-api-access-s87sz\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.243475 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g" event={"ID":"e53eb8db-1d23-4aac-85d2-36f1008834bb","Type":"ContainerDied","Data":"ed7b942a048de0889e45b34e1efb8ee68476ba9d42ebf1ebadcb5fb2e38cdd37"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.243542 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed7b942a048de0889e45b34e1efb8ee68476ba9d42ebf1ebadcb5fb2e38cdd37" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.243631 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9ee4-account-create-update-5rm9g" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.247661 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.247865 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ad5-account-create-update-jmqp8" event={"ID":"7528663a-6635-4fe0-8840-d0d0601799ce","Type":"ContainerDied","Data":"002e339c837bd7255ea4874d7c460a02879668789e287fb3818d7bca9f805770"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.247929 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="002e339c837bd7255ea4874d7c460a02879668789e287fb3818d7bca9f805770" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.251588 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9cnzd" event={"ID":"5f05189a-2b40-4e96-bc85-1b23401fc9d9","Type":"ContainerDied","Data":"b18664f3393ab9ca553a3e7932710eab4586f4da3a7583f8e4550fde48100318"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.251636 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b18664f3393ab9ca553a3e7932710eab4586f4da3a7583f8e4550fde48100318" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.251875 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9cnzd" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.253163 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nwdjg" event={"ID":"d072d085-14a2-4137-a9a1-29882ab4fe55","Type":"ContainerDied","Data":"006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.253215 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="006bd137828cd96ad1175bb1cebfeacfd47d4ed6369e3d0c80031367f715cbc5" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.253225 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nwdjg" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.254677 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9be2-account-create-update-4dzgs" event={"ID":"91704c86-8e91-4bf0-8ee0-def68c8c321a","Type":"ContainerDied","Data":"25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.254707 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25ee27f5552e95be76ac548b73be40b9f1190b0f05a269597ce05c31f39d8488" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.254711 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-9be2-account-create-update-4dzgs" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260087 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-central-agent" containerID="cri-o://bd81f44bf4861352ba63859c0d734505ea286bf5d0b3f84d4dcb8a74f1ed8b08" gracePeriod=30 Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260338 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-q4zxq" event={"ID":"5908ab4f-64fe-466c-b085-0c70ca92a868","Type":"ContainerDied","Data":"bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98"} Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260381 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc9868ff9deb989bd90045fe88bd0c53669c162a2b5af442757ef8438040fb98" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260427 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-q4zxq" Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260470 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="sg-core" containerID="cri-o://22814d8bcb82819e027a42b2658a2ab6a63745732c11776755d91019f66affec" gracePeriod=30 Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260612 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="proxy-httpd" containerID="cri-o://3c6faed749bfecc752e9ababeb0f486dba03f803ea0432979872ad1ac8cb411e" gracePeriod=30 Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.260633 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-notification-agent" containerID="cri-o://883038450cbd09e6679340749573c428098091a5102e84524f748bd7e5eb8405" gracePeriod=30 Dec 01 19:53:23 crc kubenswrapper[4888]: I1201 19:53:23.775062 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.270906 4888 generic.go:334] "Generic (PLEG): container finished" podID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerID="3c6faed749bfecc752e9ababeb0f486dba03f803ea0432979872ad1ac8cb411e" exitCode=0 Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.270940 4888 generic.go:334] "Generic (PLEG): container finished" podID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerID="22814d8bcb82819e027a42b2658a2ab6a63745732c11776755d91019f66affec" exitCode=2 Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.270951 4888 generic.go:334] "Generic (PLEG): container finished" podID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerID="883038450cbd09e6679340749573c428098091a5102e84524f748bd7e5eb8405" exitCode=0 Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.270982 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerDied","Data":"3c6faed749bfecc752e9ababeb0f486dba03f803ea0432979872ad1ac8cb411e"} Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.271018 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerDied","Data":"22814d8bcb82819e027a42b2658a2ab6a63745732c11776755d91019f66affec"} Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.271028 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerDied","Data":"883038450cbd09e6679340749573c428098091a5102e84524f748bd7e5eb8405"} Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.421257 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.421761 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-log" containerID="cri-o://3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096" gracePeriod=30 Dec 01 19:53:24 crc kubenswrapper[4888]: I1201 19:53:24.421836 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-httpd" containerID="cri-o://2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7" gracePeriod=30 Dec 01 19:53:25 crc kubenswrapper[4888]: I1201 19:53:25.180751 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:25 crc kubenswrapper[4888]: I1201 19:53:25.181460 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-log" containerID="cri-o://1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b" gracePeriod=30 Dec 01 19:53:25 crc kubenswrapper[4888]: I1201 19:53:25.181562 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-httpd" containerID="cri-o://5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b" gracePeriod=30 Dec 01 19:53:25 crc kubenswrapper[4888]: I1201 19:53:25.284990 4888 generic.go:334] "Generic (PLEG): container finished" podID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerID="3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096" exitCode=143 Dec 01 19:53:25 crc kubenswrapper[4888]: I1201 19:53:25.285040 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerDied","Data":"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096"} Dec 01 19:53:26 crc kubenswrapper[4888]: I1201 19:53:26.296880 4888 generic.go:334] "Generic (PLEG): container finished" podID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerID="1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b" exitCode=143 Dec 01 19:53:26 crc kubenswrapper[4888]: I1201 19:53:26.296913 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerDied","Data":"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b"} Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.083269 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.183847 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.241986 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.242308 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.242351 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.242449 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt7bw\" (UniqueName: \"kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.242503 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.243261 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.243331 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.243407 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data\") pod \"72b66d64-7b46-48b1-9038-9c2623a5cb90\" (UID: \"72b66d64-7b46-48b1-9038-9c2623a5cb90\") " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 
19:53:28.243720 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.244007 4888 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.246933 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs" (OuterVolumeSpecName: "logs") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.251526 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.252410 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw" (OuterVolumeSpecName: "kube-api-access-kt7bw") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "kube-api-access-kt7bw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.256806 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts" (OuterVolumeSpecName: "scripts") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.273674 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.296925 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data" (OuterVolumeSpecName: "config-data") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.302397 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "72b66d64-7b46-48b1-9038-9c2623a5cb90" (UID: "72b66d64-7b46-48b1-9038-9c2623a5cb90"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.312795 4888 generic.go:334] "Generic (PLEG): container finished" podID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerID="2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7" exitCode=0 Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.312836 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerDied","Data":"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7"} Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.312854 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.312874 4888 scope.go:117] "RemoveContainer" containerID="2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.312861 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"72b66d64-7b46-48b1-9038-9c2623a5cb90","Type":"ContainerDied","Data":"e2d5f83f4c79d040e322b79e2b621d742fd97d1f3b24ab9d9ccd8bdb938d277f"} Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345390 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345426 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345466 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345479 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt7bw\" (UniqueName: \"kubernetes.io/projected/72b66d64-7b46-48b1-9038-9c2623a5cb90-kube-api-access-kt7bw\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345490 4888 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345501 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72b66d64-7b46-48b1-9038-9c2623a5cb90-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.345511 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b66d64-7b46-48b1-9038-9c2623a5cb90-combined-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.377984 4888 scope.go:117] "RemoveContainer" containerID="3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.378819 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.387444 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.395387 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.406018 4888 scope.go:117] "RemoveContainer" containerID="2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.407593 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7\": container with ID starting with 2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7 not found: ID does not exist" containerID="2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.407630 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7"} err="failed to get container status \"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7\": rpc error: code = NotFound desc = could not find container \"2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7\": container with ID starting with 2c4d8952f1fbebb66db53e867f6cfc61b3bef3f264084090c9abc09a1feee6e7 not found: ID does not exist" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.407654 4888 scope.go:117] "RemoveContainer" containerID="3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.408052 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096\": container with ID starting with 3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096 not found: ID does not exist" containerID="3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.408089 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096"} err="failed to get container status \"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096\": rpc error: code = NotFound desc = could not find container \"3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096\": container with ID starting with 3e3596c4c170dcc26348745491456cb3d7e2311b7666b01a4094a95146180096 not found: ID does not exist" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.409836 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410390 4888 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-httpd" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410404 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-httpd" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410425 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f05189a-2b40-4e96-bc85-1b23401fc9d9" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410431 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f05189a-2b40-4e96-bc85-1b23401fc9d9" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410455 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53eb8db-1d23-4aac-85d2-36f1008834bb" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410463 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53eb8db-1d23-4aac-85d2-36f1008834bb" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410479 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-log" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410486 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-log" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410502 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d072d085-14a2-4137-a9a1-29882ab4fe55" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410509 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="d072d085-14a2-4137-a9a1-29882ab4fe55" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410527 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5908ab4f-64fe-466c-b085-0c70ca92a868" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410533 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5908ab4f-64fe-466c-b085-0c70ca92a868" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410545 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7528663a-6635-4fe0-8840-d0d0601799ce" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410552 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7528663a-6635-4fe0-8840-d0d0601799ce" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410566 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410572 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" Dec 01 19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410580 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91704c86-8e91-4bf0-8ee0-def68c8c321a" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410589 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="91704c86-8e91-4bf0-8ee0-def68c8c321a" containerName="mariadb-account-create-update" Dec 01 
19:53:28 crc kubenswrapper[4888]: E1201 19:53:28.410599 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon-log" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410605 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon-log" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410810 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410829 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="d072d085-14a2-4137-a9a1-29882ab4fe55" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410837 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7528663a-6635-4fe0-8840-d0d0601799ce" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410849 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-log" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410861 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5908ab4f-64fe-466c-b085-0c70ca92a868" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410883 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f05189a-2b40-4e96-bc85-1b23401fc9d9" containerName="mariadb-database-create" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410894 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" containerName="glance-httpd" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410906 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a90a13-4ff1-4a48-8ced-df9b4765db1b" containerName="horizon-log" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410917 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e53eb8db-1d23-4aac-85d2-36f1008834bb" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.410928 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="91704c86-8e91-4bf0-8ee0-def68c8c321a" containerName="mariadb-account-create-update" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.412081 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.414875 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.415474 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.432731 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.448544 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.465054 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72b66d64-7b46-48b1-9038-9c2623a5cb90" path="/var/lib/kubelet/pods/72b66d64-7b46-48b1-9038-9c2623a5cb90/volumes" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551003 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551395 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csfjz\" (UniqueName: \"kubernetes.io/projected/b9f3c82f-62d2-4a71-9832-223f1a735016-kube-api-access-csfjz\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551428 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551450 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551502 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.551752 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 
19:53:28.551799 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.552051 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660447 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660488 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660618 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660661 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660718 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csfjz\" (UniqueName: \"kubernetes.io/projected/b9f3c82f-62d2-4a71-9832-223f1a735016-kube-api-access-csfjz\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660699 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.661151 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.661898 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-logs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.660743 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9f3c82f-62d2-4a71-9832-223f1a735016-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.661943 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.661985 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.665847 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.667490 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.668431 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.670108 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f3c82f-62d2-4a71-9832-223f1a735016-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.684995 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csfjz\" (UniqueName: \"kubernetes.io/projected/b9f3c82f-62d2-4a71-9832-223f1a735016-kube-api-access-csfjz\") pod \"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.706198 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod 
\"glance-default-external-api-0\" (UID: \"b9f3c82f-62d2-4a71-9832-223f1a735016\") " pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.820843 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 19:53:28 crc kubenswrapper[4888]: I1201 19:53:28.960928 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.071962 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072077 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072149 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6x9j\" (UniqueName: \"kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072227 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072287 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072335 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072375 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.072401 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts\") pod \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\" (UID: \"cf3f2dee-4f3a-4eff-90a7-5af07a142d76\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.073910 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs" (OuterVolumeSpecName: "logs") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" 
(UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.074941 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.081740 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.081850 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts" (OuterVolumeSpecName: "scripts") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.081909 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j" (OuterVolumeSpecName: "kube-api-access-p6x9j") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "kube-api-access-p6x9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.118360 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.150275 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.155148 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data" (OuterVolumeSpecName: "config-data") pod "cf3f2dee-4f3a-4eff-90a7-5af07a142d76" (UID: "cf3f2dee-4f3a-4eff-90a7-5af07a142d76"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.174972 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175011 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175025 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6x9j\" (UniqueName: \"kubernetes.io/projected/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-kube-api-access-p6x9j\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175035 4888 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175044 4888 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175054 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175061 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.175069 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf3f2dee-4f3a-4eff-90a7-5af07a142d76-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.206128 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.263360 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vmhn6"] Dec 01 19:53:29 crc kubenswrapper[4888]: E1201 19:53:29.263839 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-httpd" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.263859 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-httpd" Dec 01 19:53:29 crc kubenswrapper[4888]: E1201 19:53:29.263899 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-log" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.263908 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-log" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.264125 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-httpd" Dec 01 19:53:29 crc 
kubenswrapper[4888]: I1201 19:53:29.264159 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerName="glance-log" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.264950 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.267427 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cvqzt" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.267680 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.267850 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.276720 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.278056 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vmhn6"] Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.334978 4888 generic.go:334] "Generic (PLEG): container finished" podID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerID="bd81f44bf4861352ba63859c0d734505ea286bf5d0b3f84d4dcb8a74f1ed8b08" exitCode=0 Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.335049 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerDied","Data":"bd81f44bf4861352ba63859c0d734505ea286bf5d0b3f84d4dcb8a74f1ed8b08"} Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.358989 4888 generic.go:334] "Generic (PLEG): container finished" podID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" containerID="5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b" exitCode=0 Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.359040 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerDied","Data":"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b"} Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.359071 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cf3f2dee-4f3a-4eff-90a7-5af07a142d76","Type":"ContainerDied","Data":"7fbd2b14126e4e8b06093d69d7f98821adf50fdd9d56122d6a6c49897f334b46"} Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.359088 4888 scope.go:117] "RemoveContainer" containerID="5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.359232 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.377946 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.378012 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.378041 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w4xz\" (UniqueName: \"kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.378069 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.394962 4888 scope.go:117] "RemoveContainer" containerID="1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.403523 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.422575 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.439528 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.441604 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.444091 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.444351 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.456363 4888 scope.go:117] "RemoveContainer" containerID="5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b" Dec 01 19:53:29 crc kubenswrapper[4888]: E1201 19:53:29.457075 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b\": container with ID starting with 5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b not found: ID does not exist" containerID="5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.457110 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b"} err="failed to get container status \"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b\": rpc error: code = NotFound desc = could not find container \"5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b\": container with ID starting with 5740aa6cf42bf6cbd694c9c80ecb23c367313ea92c7ea5c0dd75d0c6f9849f8b not found: ID does not exist" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.457133 4888 scope.go:117] "RemoveContainer" containerID="1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.462748 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 19:53:29 crc kubenswrapper[4888]: E1201 19:53:29.469729 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b\": container with ID starting with 1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b not found: ID does not exist" containerID="1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.469768 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b"} err="failed to get container status \"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b\": rpc error: code = NotFound desc = could not find container \"1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b\": container with ID starting with 1630bd744a38030da15222add80b546dbd120ac8f6dcd1b95626d3a6cd1c213b not found: ID does not exist" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.479331 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.479400 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.479433 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w4xz\" (UniqueName: \"kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.479457 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.484844 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.485027 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.496056 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.499113 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.501201 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w4xz\" (UniqueName: \"kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz\") pod \"nova-cell0-conductor-db-sync-vmhn6\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") " pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.580534 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581249 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581373 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581414 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581482 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581538 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581556 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tclbv\" (UniqueName: \"kubernetes.io/projected/2f583b75-592c-438c-ae74-80dbd15c4eb1-kube-api-access-tclbv\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581579 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.581608 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.594497 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683449 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683580 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683628 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683715 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llr58\" (UniqueName: \"kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683771 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683805 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.683921 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts\") pod \"13958130-9b7e-42c3-8c2d-d2486ac32798\" (UID: \"13958130-9b7e-42c3-8c2d-d2486ac32798\") " Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684227 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684295 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684322 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-scripts\") pod 
\"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684379 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684437 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684453 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tclbv\" (UniqueName: \"kubernetes.io/projected/2f583b75-592c-438c-ae74-80dbd15c4eb1-kube-api-access-tclbv\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684470 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.684493 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.685242 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.686883 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.687297 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.687639 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.688981 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f583b75-592c-438c-ae74-80dbd15c4eb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.689756 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts" (OuterVolumeSpecName: "scripts") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.696824 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.702148 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.705086 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58" (OuterVolumeSpecName: "kube-api-access-llr58") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "kube-api-access-llr58". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.705207 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.705677 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f583b75-592c-438c-ae74-80dbd15c4eb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.707107 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tclbv\" (UniqueName: \"kubernetes.io/projected/2f583b75-592c-438c-ae74-80dbd15c4eb1-kube-api-access-tclbv\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.746822 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"2f583b75-592c-438c-ae74-80dbd15c4eb1\") " pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.787269 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788441 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788487 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788507 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788520 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llr58\" (UniqueName: \"kubernetes.io/projected/13958130-9b7e-42c3-8c2d-d2486ac32798-kube-api-access-llr58\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788532 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.788542 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13958130-9b7e-42c3-8c2d-d2486ac32798-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.856405 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.891461 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.905808 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data" (OuterVolumeSpecName: "config-data") pod "13958130-9b7e-42c3-8c2d-d2486ac32798" (UID: "13958130-9b7e-42c3-8c2d-d2486ac32798"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:53:29 crc kubenswrapper[4888]: I1201 19:53:29.993447 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13958130-9b7e-42c3-8c2d-d2486ac32798-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.148204 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vmhn6"] Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.390442 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13958130-9b7e-42c3-8c2d-d2486ac32798","Type":"ContainerDied","Data":"6feb09dcbcdbc8f88d187b5f34ac78cc460d429e10cbde198c28a9fa64f84ad2"} Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.390510 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.390521 4888 scope.go:117] "RemoveContainer" containerID="3c6faed749bfecc752e9ababeb0f486dba03f803ea0432979872ad1ac8cb411e" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.402519 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f3c82f-62d2-4a71-9832-223f1a735016","Type":"ContainerStarted","Data":"b9794475d3e4a4133733909232216032b113503e879b237d3cae15d6f8231bbf"} Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.402991 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f3c82f-62d2-4a71-9832-223f1a735016","Type":"ContainerStarted","Data":"92203e590932766f6b26f7b5dc74f4cf0e75729dc949671ff03316e3f4ecebde"} Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.406469 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" event={"ID":"2c3317b0-1533-44ad-81c0-e0b0b150fa91","Type":"ContainerStarted","Data":"5871d703093fb67ca0d7ea5bcf06567ef43d229daf8f83ea13a08c91f1222b6b"} Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.411942 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.428116 4888 scope.go:117] "RemoveContainer" containerID="22814d8bcb82819e027a42b2658a2ab6a63745732c11776755d91019f66affec" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.431367 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:30 crc kubenswrapper[4888]: W1201 19:53:30.435804 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f583b75_592c_438c_ae74_80dbd15c4eb1.slice/crio-8ecaf1d513bd91f97e2ac6a13f883ba0c38883898a010a474c68078c8e644713 WatchSource:0}: Error finding container 8ecaf1d513bd91f97e2ac6a13f883ba0c38883898a010a474c68078c8e644713: Status 404 returned error can't find the container with id 8ecaf1d513bd91f97e2ac6a13f883ba0c38883898a010a474c68078c8e644713 Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.441715 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.499085 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" path="/var/lib/kubelet/pods/13958130-9b7e-42c3-8c2d-d2486ac32798/volumes" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.503790 4888 scope.go:117] "RemoveContainer" containerID="883038450cbd09e6679340749573c428098091a5102e84524f748bd7e5eb8405" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.505733 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf3f2dee-4f3a-4eff-90a7-5af07a142d76" path="/var/lib/kubelet/pods/cf3f2dee-4f3a-4eff-90a7-5af07a142d76/volumes" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.506628 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:30 crc kubenswrapper[4888]: E1201 19:53:30.507201 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-central-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507226 4888 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-central-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: E1201 19:53:30.507246 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="sg-core" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507255 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="sg-core" Dec 01 19:53:30 crc kubenswrapper[4888]: E1201 19:53:30.507283 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="proxy-httpd" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507292 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="proxy-httpd" Dec 01 19:53:30 crc kubenswrapper[4888]: E1201 19:53:30.507309 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-notification-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507318 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-notification-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507571 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-central-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507601 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="ceilometer-notification-agent" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507624 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="proxy-httpd" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.507641 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="13958130-9b7e-42c3-8c2d-d2486ac32798" containerName="sg-core" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.512393 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.512533 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.514517 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.514643 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.589174 4888 scope.go:117] "RemoveContainer" containerID="bd81f44bf4861352ba63859c0d734505ea286bf5d0b3f84d4dcb8a74f1ed8b08" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.607754 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.607812 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg2xw\" (UniqueName: \"kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.607835 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.607939 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.608027 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.608128 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.608166 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710231 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc 
kubenswrapper[4888]: I1201 19:53:30.710288 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg2xw\" (UniqueName: \"kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710326 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710366 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710401 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710448 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710474 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.710978 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.713388 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.719026 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.719196 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.719473 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.724171 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.732899 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg2xw\" (UniqueName: \"kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw\") pod \"ceilometer-0\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " pod="openstack/ceilometer-0" Dec 01 19:53:30 crc kubenswrapper[4888]: I1201 19:53:30.835690 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.333566 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.422709 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerStarted","Data":"862e6e11fbaaa8e2e4b1ac2725d2a863cd6f29dd269a3ebeda2cd4e2ee5d4ee1"} Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.427136 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2f583b75-592c-438c-ae74-80dbd15c4eb1","Type":"ContainerStarted","Data":"684910e7a2261a1c87cf8614af8993ff0ff6e18b85c44c2b8367f29f07cd72a6"} Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.427167 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"2f583b75-592c-438c-ae74-80dbd15c4eb1","Type":"ContainerStarted","Data":"8ecaf1d513bd91f97e2ac6a13f883ba0c38883898a010a474c68078c8e644713"} Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.428962 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9f3c82f-62d2-4a71-9832-223f1a735016","Type":"ContainerStarted","Data":"ce27031246abbd1d2e4bcaa9694b41406bc0e9d696897ab8f02fe1923313ca6d"} Dec 01 19:53:31 crc kubenswrapper[4888]: I1201 19:53:31.465483 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.465459173 podStartE2EDuration="3.465459173s" podCreationTimestamp="2025-12-01 19:53:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:31.456792374 +0000 UTC m=+1211.327822298" watchObservedRunningTime="2025-12-01 19:53:31.465459173 +0000 UTC m=+1211.336489087" Dec 01 19:53:32 crc kubenswrapper[4888]: I1201 19:53:32.440357 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerStarted","Data":"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f"} Dec 01 19:53:32 crc kubenswrapper[4888]: I1201 19:53:32.442204 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"2f583b75-592c-438c-ae74-80dbd15c4eb1","Type":"ContainerStarted","Data":"6458a40efaae49bbc733a20e51432e31a15f98e361e0d565f5a40d5ebc5dae35"} Dec 01 19:53:32 crc kubenswrapper[4888]: I1201 19:53:32.465610 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.465590159 podStartE2EDuration="3.465590159s" podCreationTimestamp="2025-12-01 19:53:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:32.459979593 +0000 UTC m=+1212.331009497" watchObservedRunningTime="2025-12-01 19:53:32.465590159 +0000 UTC m=+1212.336620073" Dec 01 19:53:33 crc kubenswrapper[4888]: I1201 19:53:33.454263 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerStarted","Data":"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae"} Dec 01 19:53:38 crc kubenswrapper[4888]: E1201 19:53:38.463969 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.504723 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" event={"ID":"2c3317b0-1533-44ad-81c0-e0b0b150fa91","Type":"ContainerStarted","Data":"78afded45ea42ff0e084d6d38f874cd034a8d8fea965f75db8ff1068fd99d038"} Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.525914 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" podStartSLOduration=2.371009982 podStartE2EDuration="9.525890431s" podCreationTimestamp="2025-12-01 19:53:29 +0000 UTC" firstStartedPulling="2025-12-01 19:53:30.167028781 +0000 UTC m=+1210.038058695" lastFinishedPulling="2025-12-01 19:53:37.32190923 +0000 UTC m=+1217.192939144" observedRunningTime="2025-12-01 19:53:38.523029272 +0000 UTC m=+1218.394059186" watchObservedRunningTime="2025-12-01 19:53:38.525890431 +0000 UTC m=+1218.396920355" Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.528812 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerStarted","Data":"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7"} Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.821455 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.821792 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 19:53:38 crc kubenswrapper[4888]: I1201 19:53:38.859834 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 19:53:38 crc 
kubenswrapper[4888]: I1201 19:53:38.873539 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.540077 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerStarted","Data":"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76"}
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.540420 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.540442 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.567118 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.054503285 podStartE2EDuration="9.567082572s" podCreationTimestamp="2025-12-01 19:53:30 +0000 UTC" firstStartedPulling="2025-12-01 19:53:31.352629824 +0000 UTC m=+1211.223659738" lastFinishedPulling="2025-12-01 19:53:38.865209111 +0000 UTC m=+1218.736239025" observedRunningTime="2025-12-01 19:53:39.560654814 +0000 UTC m=+1219.431684738" watchObservedRunningTime="2025-12-01 19:53:39.567082572 +0000 UTC m=+1219.438112486"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.788947 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.789013 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.822124 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:39 crc kubenswrapper[4888]: I1201 19:53:39.831301 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:40 crc kubenswrapper[4888]: I1201 19:53:40.549447 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 01 19:53:40 crc kubenswrapper[4888]: I1201 19:53:40.550485 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:40 crc kubenswrapper[4888]: I1201 19:53:40.550512 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:41 crc kubenswrapper[4888]: I1201 19:53:41.627000 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 01 19:53:41 crc kubenswrapper[4888]: I1201 19:53:41.627715 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:53:41 crc kubenswrapper[4888]: I1201 19:53:41.635145 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Dec 01 19:53:42 crc kubenswrapper[4888]: I1201 19:53:42.867940 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:42 crc kubenswrapper[4888]: I1201 19:53:42.868030 4888 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 19:53:42 crc kubenswrapper[4888]: I1201 19:53:42.883094 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 01 19:53:47 crc kubenswrapper[4888]: I1201 19:53:47.617725 4888 generic.go:334] "Generic (PLEG): container finished" podID="2c3317b0-1533-44ad-81c0-e0b0b150fa91" containerID="78afded45ea42ff0e084d6d38f874cd034a8d8fea965f75db8ff1068fd99d038" exitCode=0
Dec 01 19:53:47 crc kubenswrapper[4888]: I1201 19:53:47.617840 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" event={"ID":"2c3317b0-1533-44ad-81c0-e0b0b150fa91","Type":"ContainerDied","Data":"78afded45ea42ff0e084d6d38f874cd034a8d8fea965f75db8ff1068fd99d038"}
Dec 01 19:53:48 crc kubenswrapper[4888]: E1201 19:53:48.732223 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache]"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.246299 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vmhn6"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.390894 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data\") pod \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") "
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.392002 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle\") pod \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") "
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.392052 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts\") pod \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") "
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.392081 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w4xz\" (UniqueName: \"kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz\") pod \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\" (UID: \"2c3317b0-1533-44ad-81c0-e0b0b150fa91\") "
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.396357 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz" (OuterVolumeSpecName: "kube-api-access-7w4xz") pod "2c3317b0-1533-44ad-81c0-e0b0b150fa91" (UID: "2c3317b0-1533-44ad-81c0-e0b0b150fa91"). InnerVolumeSpecName "kube-api-access-7w4xz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.397415 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts" (OuterVolumeSpecName: "scripts") pod "2c3317b0-1533-44ad-81c0-e0b0b150fa91" (UID: "2c3317b0-1533-44ad-81c0-e0b0b150fa91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.418484 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data" (OuterVolumeSpecName: "config-data") pod "2c3317b0-1533-44ad-81c0-e0b0b150fa91" (UID: "2c3317b0-1533-44ad-81c0-e0b0b150fa91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.421398 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c3317b0-1533-44ad-81c0-e0b0b150fa91" (UID: "2c3317b0-1533-44ad-81c0-e0b0b150fa91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.494169 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.494222 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.494233 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w4xz\" (UniqueName: \"kubernetes.io/projected/2c3317b0-1533-44ad-81c0-e0b0b150fa91-kube-api-access-7w4xz\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.494245 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c3317b0-1533-44ad-81c0-e0b0b150fa91-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.639290 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vmhn6" event={"ID":"2c3317b0-1533-44ad-81c0-e0b0b150fa91","Type":"ContainerDied","Data":"5871d703093fb67ca0d7ea5bcf06567ef43d229daf8f83ea13a08c91f1222b6b"}
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.639338 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5871d703093fb67ca0d7ea5bcf06567ef43d229daf8f83ea13a08c91f1222b6b"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.639309 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vmhn6"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.731478 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 01 19:53:49 crc kubenswrapper[4888]: E1201 19:53:49.731915 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c3317b0-1533-44ad-81c0-e0b0b150fa91" containerName="nova-cell0-conductor-db-sync"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.731931 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c3317b0-1533-44ad-81c0-e0b0b150fa91" containerName="nova-cell0-conductor-db-sync"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.732115 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c3317b0-1533-44ad-81c0-e0b0b150fa91" containerName="nova-cell0-conductor-db-sync"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.732851 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.734991 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.735309 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cvqzt"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.743134 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.800178 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.800476 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnd9c\" (UniqueName: \"kubernetes.io/projected/b819abdf-a2be-4ee7-a019-15bfbc16578a-kube-api-access-rnd9c\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.800713 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.902523 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnd9c\" (UniqueName: \"kubernetes.io/projected/b819abdf-a2be-4ee7-a019-15bfbc16578a-kube-api-access-rnd9c\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.902590 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.902669 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.907365 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.907640 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b819abdf-a2be-4ee7-a019-15bfbc16578a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:49 crc kubenswrapper[4888]: I1201 19:53:49.921506 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnd9c\" (UniqueName: \"kubernetes.io/projected/b819abdf-a2be-4ee7-a019-15bfbc16578a-kube-api-access-rnd9c\") pod \"nova-cell0-conductor-0\" (UID: \"b819abdf-a2be-4ee7-a019-15bfbc16578a\") " pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:50 crc kubenswrapper[4888]: I1201 19:53:50.050395 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:50 crc kubenswrapper[4888]: I1201 19:53:50.488821 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 01 19:53:50 crc kubenswrapper[4888]: I1201 19:53:50.649593 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b819abdf-a2be-4ee7-a019-15bfbc16578a","Type":"ContainerStarted","Data":"44920c1e5fdbe3f21208ddd9380a70a906560a84b39ca4a345431ece0e086459"}
Dec 01 19:53:51 crc kubenswrapper[4888]: I1201 19:53:51.663072 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b819abdf-a2be-4ee7-a019-15bfbc16578a","Type":"ContainerStarted","Data":"250870ae1829bbd8af03de81697878ded73b45243770250aa63809c31ab37750"}
Dec 01 19:53:51 crc kubenswrapper[4888]: I1201 19:53:51.663734 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.077699 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.094682 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=6.094598532 podStartE2EDuration="6.094598532s" podCreationTimestamp="2025-12-01 19:53:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:51.688135079 +0000 UTC m=+1231.559165003" watchObservedRunningTime="2025-12-01 19:53:55.094598532 +0000 UTC m=+1234.965628466"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.510438 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-q4d97"]
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.511907 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.514576 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.514906 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.520567 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q4d97"]
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.609165 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.609304 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.609511 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.609852 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkhkc\" (UniqueName: \"kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.711765 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkhkc\" (UniqueName: \"kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.711844 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.711861 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.711909 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.719372 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.725809 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.733107 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.753543 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkhkc\" (UniqueName: \"kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc\") pod \"nova-cell0-cell-mapping-q4d97\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.759057 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.764767 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.785749 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.823642 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szvh4\" (UniqueName: \"kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.823790 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.823862 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.823912 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.865730 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.869502 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q4d97"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.926563 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szvh4\" (UniqueName: \"kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.926682 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.926727 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.926765 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.931294 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.963461 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szvh4\" (UniqueName: \"kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.981022 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.991038 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " pod="openstack/nova-api-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.991112 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.992740 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 19:53:55 crc kubenswrapper[4888]: I1201 19:53:55.996267 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.028002 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.028040 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtq4p\" (UniqueName: \"kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.028065 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.028135 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.029463 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.046347 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.047740 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.049460 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.066432 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.091260 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.092749 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.102087 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.117486 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131126 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131234 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131266 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pznr\" (UniqueName: \"kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131298 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm8sg\" (UniqueName: \"kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131323 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131366 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtq4p\" (UniqueName: \"kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131440 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131507 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131531 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.131566 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.134631 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.136124 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.136909 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.143178 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.143634 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.144885 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.160414 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.170960 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtq4p\" (UniqueName: \"kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p\") pod \"nova-metadata-0\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.234228 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.238141 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.238233 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.238319 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.238638 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.238775 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.239610 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.239647 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p86nn\" (UniqueName: \"kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.240047 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.240133 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pznr\" (UniqueName: \"kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.240167 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.240258 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm8sg\" (UniqueName: \"kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.288817 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm8sg\" (UniqueName: \"kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.289164 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.289503 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.290011 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.290346 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pznr\" (UniqueName: \"kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr\") pod \"nova-scheduler-0\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.290445 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.341870 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.341966 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.342022 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.342060 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.342123 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.342141 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p86nn\" (UniqueName: \"kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.344845 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.345125 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.347949 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.348683 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.350129 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.363115 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p86nn\" (UniqueName: \"kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn\") pod \"dnsmasq-dns-757b4f8459-rk96c\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.404380 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.424367 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.438400 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.479094 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.558868 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-q4d97"]
Dec 01 19:53:56 crc kubenswrapper[4888]: W1201 19:53:56.586454 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e1aa47b_6022_4533_91e7_e6108f9e7b63.slice/crio-664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8 WatchSource:0}: Error finding container 664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8: Status 404 returned error can't find the container with id 664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.750539 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q4d97" event={"ID":"2e1aa47b-6022-4533-91e7-e6108f9e7b63","Type":"ContainerStarted","Data":"664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8"}
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.808359 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-659wd"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.809757 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.814412 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.814666 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.833315 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-659wd"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.863089 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.865945 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.866086 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.866131 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.866155 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6gqs\" (UniqueName: \"kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.971521 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.971598 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.971637 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6gqs\" (UniqueName: \"kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.971769 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:56 crc kubenswrapper[4888]: I1201 19:53:56.981218 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.001944 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.001947 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6gqs\" (UniqueName: \"kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.007447 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-659wd\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:57 crc kubenswrapper[4888]: W1201 19:53:57.137353 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc04488b_5fa6_47da_9e07_7f7e78aa446e.slice/crio-4439804330b5ef73a9bd05a4239c0569b8ca09458866e7f5b23517b3e8c8a3cf WatchSource:0}: Error finding container 4439804330b5ef73a9bd05a4239c0569b8ca09458866e7f5b23517b3e8c8a3cf: Status 404 returned error can't find the container with id 4439804330b5ef73a9bd05a4239c0569b8ca09458866e7f5b23517b3e8c8a3cf
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.139966 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.206215 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-659wd"
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.272276 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:53:57 crc kubenswrapper[4888]: W1201 19:53:57.281374 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ff57d03_1df0_469e_a02e_257696b7d646.slice/crio-476ed79a8265681bf2cb312afc96e7db55b0fe8ad0f84267e62e4b56a78ab001 WatchSource:0}: Error finding container 476ed79a8265681bf2cb312afc96e7db55b0fe8ad0f84267e62e4b56a78ab001: Status 404 returned error can't find the container with id 476ed79a8265681bf2cb312afc96e7db55b0fe8ad0f84267e62e4b56a78ab001
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.286560 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"]
Dec 01 19:53:57 crc kubenswrapper[4888]: W1201 19:53:57.288136 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7fe219b_5f85_484b_b6cb_495f1e6264f2.slice/crio-887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0 WatchSource:0}: Error finding container 887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0: Status 404 returned error can't find the container with id 887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.304617 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.763789 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-659wd"]
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.773981 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15b0cb64-b048-4160-aca0-0a1fc5560aef","Type":"ContainerStarted","Data":"d58ee47b404a78797def802234fef200d3167c2ed35509e17ed80faf465530f8"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.779697 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerStarted","Data":"4439804330b5ef73a9bd05a4239c0569b8ca09458866e7f5b23517b3e8c8a3cf"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.783430 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q4d97" event={"ID":"2e1aa47b-6022-4533-91e7-e6108f9e7b63","Type":"ContainerStarted","Data":"68ff2eb8ba45911c903718565d6de350e4e1f06b3cd5124cb7196b39a5e528e3"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.788813 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-659wd" event={"ID":"c5009fe9-671f-4c13-9c74-45d61ab93ca0","Type":"ContainerStarted","Data":"397d39bfcbd43b49b01fe19f40af06b4199dd66595551888ab197d4dd5b74b16"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.795598 4888 generic.go:334] "Generic (PLEG): container finished" podID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerID="de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f" exitCode=0
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.796308 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" event={"ID":"f7fe219b-5f85-484b-b6cb-495f1e6264f2","Type":"ContainerDied","Data":"de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.796371 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" event={"ID":"f7fe219b-5f85-484b-b6cb-495f1e6264f2","Type":"ContainerStarted","Data":"887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.806081 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0ff57d03-1df0-469e-a02e-257696b7d646","Type":"ContainerStarted","Data":"476ed79a8265681bf2cb312afc96e7db55b0fe8ad0f84267e62e4b56a78ab001"}
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.807683 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-q4d97" podStartSLOduration=2.807667368 podStartE2EDuration="2.807667368s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:57.802969028 +0000 UTC m=+1237.673998962" watchObservedRunningTime="2025-12-01 19:53:57.807667368 +0000 UTC m=+1237.678697282"
Dec 01 19:53:57 crc kubenswrapper[4888]: I1201 19:53:57.815009 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerStarted","Data":"e74755e0d5616cbaaa86154e959cf873ba3270b9a653b1d118c582c7a9068aac"}
Dec 01 19:53:58 crc kubenswrapper[4888]: I1201 19:53:58.837254 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-659wd" event={"ID":"c5009fe9-671f-4c13-9c74-45d61ab93ca0","Type":"ContainerStarted","Data":"c213a0eb8684dcc9949d8b22ede26cfa17d51537ec00947b5b4eeaa54a5d484f"}
Dec 01 19:53:58 crc kubenswrapper[4888]: I1201 19:53:58.861027 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" event={"ID":"f7fe219b-5f85-484b-b6cb-495f1e6264f2","Type":"ContainerStarted","Data":"53a33fe78c3c4d3fc20837f0e399a847af8b566e36f032513d2db9c50fbf7a0b"}
Dec 01 19:53:58 crc kubenswrapper[4888]: I1201 19:53:58.861087 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-rk96c"
Dec 01 19:53:58 crc kubenswrapper[4888]: I1201 19:53:58.861597 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-659wd" podStartSLOduration=2.861582891 podStartE2EDuration="2.861582891s" podCreationTimestamp="2025-12-01 19:53:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:58.859881194 +0000 UTC m=+1238.730911148" watchObservedRunningTime="2025-12-01 19:53:58.861582891 +0000 UTC m=+1238.732612805"
Dec 01 19:53:58 crc kubenswrapper[4888]: I1201 19:53:58.889553 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" podStartSLOduration=3.889534254 podStartE2EDuration="3.889534254s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:53:58.882434277 +0000 UTC m=+1238.753464181" watchObservedRunningTime="2025-12-01 19:53:58.889534254 +0000 UTC m=+1238.760564168"
Dec 01 19:53:59 crc kubenswrapper[4888]: E1201 19:53:59.034634 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache]"
Dec 01 19:53:59 crc kubenswrapper[4888]: I1201 19:53:59.239798 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:53:59 crc kubenswrapper[4888]: I1201 19:53:59.270758 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 01 19:54:00 crc kubenswrapper[4888]: I1201 19:54:00.852577 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.894990 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerStarted","Data":"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.895402 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerStarted","Data":"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.897698 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15b0cb64-b048-4160-aca0-0a1fc5560aef","Type":"ContainerStarted","Data":"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.897976 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="15b0cb64-b048-4160-aca0-0a1fc5560aef" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5" gracePeriod=30
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.900372 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerStarted","Data":"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.900751 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerStarted","Data":"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.900499 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-metadata" containerID="cri-o://92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" gracePeriod=30
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.900481 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-log" containerID="cri-o://14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" gracePeriod=30
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.902252 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0ff57d03-1df0-469e-a02e-257696b7d646","Type":"ContainerStarted","Data":"a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771"}
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.927691 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.148845798 podStartE2EDuration="6.927665885s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="2025-12-01 19:53:56.860747352 +0000 UTC m=+1236.731777266" lastFinishedPulling="2025-12-01 19:54:00.639567429 +0000 UTC m=+1240.510597353" observedRunningTime="2025-12-01 19:54:01.921987968 +0000 UTC m=+1241.793017902" watchObservedRunningTime="2025-12-01 19:54:01.927665885 +0000 UTC m=+1241.798695799"
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.951025 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.596505654 podStartE2EDuration="6.95100075s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="2025-12-01 19:53:57.285180305 +0000 UTC m=+1237.156210219" lastFinishedPulling="2025-12-01 19:54:00.639675401 +0000 UTC m=+1240.510705315" observedRunningTime="2025-12-01 19:54:01.940840759 +0000 UTC m=+1241.811870683" watchObservedRunningTime="2025-12-01 19:54:01.95100075 +0000 UTC m=+1241.822030664"
Dec 01 19:54:01 crc kubenswrapper[4888]: I1201 19:54:01.979479 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.482340798 podStartE2EDuration="6.979457327s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="2025-12-01 19:53:57.141810052 +0000 UTC m=+1237.012839966" lastFinishedPulling="2025-12-01 19:54:00.638926571 +0000 UTC m=+1240.509956495" observedRunningTime="2025-12-01 19:54:01.970127269 +0000 UTC m=+1241.841157183" watchObservedRunningTime="2025-12-01 19:54:01.979457327 +0000 UTC m=+1241.850487241"
Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.000627 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.642007511 podStartE2EDuration="7.000603091s" podCreationTimestamp="2025-12-01 19:53:55 +0000 UTC" firstStartedPulling="2025-12-01 19:53:57.290292016 +0000 UTC m=+1237.161321930" lastFinishedPulling="2025-12-01 19:54:00.648887596 +0000 UTC m=+1240.519917510" observedRunningTime="2025-12-01 19:54:01.988554738 +0000 UTC m=+1241.859584652" watchObservedRunningTime="2025-12-01 19:54:02.000603091 +0000 UTC m=+1241.871633015"
Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.494229 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.636838 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data\") pod \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.636945 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtq4p\" (UniqueName: \"kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p\") pod \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.636989 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs\") pod \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.637047 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle\") pod \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\" (UID: \"fc04488b-5fa6-47da-9e07-7f7e78aa446e\") " Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.637520 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs" (OuterVolumeSpecName: "logs") pod "fc04488b-5fa6-47da-9e07-7f7e78aa446e" (UID: "fc04488b-5fa6-47da-9e07-7f7e78aa446e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.638262 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc04488b-5fa6-47da-9e07-7f7e78aa446e-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.642425 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p" (OuterVolumeSpecName: "kube-api-access-xtq4p") pod "fc04488b-5fa6-47da-9e07-7f7e78aa446e" (UID: "fc04488b-5fa6-47da-9e07-7f7e78aa446e"). InnerVolumeSpecName "kube-api-access-xtq4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.672037 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data" (OuterVolumeSpecName: "config-data") pod "fc04488b-5fa6-47da-9e07-7f7e78aa446e" (UID: "fc04488b-5fa6-47da-9e07-7f7e78aa446e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.673961 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc04488b-5fa6-47da-9e07-7f7e78aa446e" (UID: "fc04488b-5fa6-47da-9e07-7f7e78aa446e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.739892 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.739935 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtq4p\" (UniqueName: \"kubernetes.io/projected/fc04488b-5fa6-47da-9e07-7f7e78aa446e-kube-api-access-xtq4p\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.739948 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc04488b-5fa6-47da-9e07-7f7e78aa446e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913332 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerID="92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" exitCode=0 Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913362 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerID="14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" exitCode=143 Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913387 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913413 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerDied","Data":"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88"} Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913464 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerDied","Data":"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15"} Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913480 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc04488b-5fa6-47da-9e07-7f7e78aa446e","Type":"ContainerDied","Data":"4439804330b5ef73a9bd05a4239c0569b8ca09458866e7f5b23517b3e8c8a3cf"} Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.913502 4888 scope.go:117] "RemoveContainer" containerID="92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.938997 4888 scope.go:117] "RemoveContainer" containerID="14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" Dec 01 19:54:02 crc kubenswrapper[4888]: I1201 19:54:02.967703 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.001281 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.004362 4888 scope.go:117] "RemoveContainer" containerID="92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" Dec 01 19:54:03 crc kubenswrapper[4888]: E1201 19:54:03.005688 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88\": 
container with ID starting with 92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88 not found: ID does not exist" containerID="92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.005728 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88"} err="failed to get container status \"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88\": rpc error: code = NotFound desc = could not find container \"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88\": container with ID starting with 92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88 not found: ID does not exist" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.005749 4888 scope.go:117] "RemoveContainer" containerID="14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" Dec 01 19:54:03 crc kubenswrapper[4888]: E1201 19:54:03.010474 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15\": container with ID starting with 14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15 not found: ID does not exist" containerID="14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.010517 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15"} err="failed to get container status \"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15\": rpc error: code = NotFound desc = could not find container \"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15\": container with ID starting with 14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15 not found: ID does not exist" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.010543 4888 scope.go:117] "RemoveContainer" containerID="92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.010852 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88"} err="failed to get container status \"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88\": rpc error: code = NotFound desc = could not find container \"92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88\": container with ID starting with 92108b7a4a2eedd33e2fcb8833e2f92a24e343fa81a93d484da652e17f424c88 not found: ID does not exist" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.010880 4888 scope.go:117] "RemoveContainer" containerID="14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.011163 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15"} err="failed to get container status \"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15\": rpc error: code = NotFound desc = could not find container \"14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15\": container with ID starting with 14dd11f2b75ba1c4fe617ea7e2a9fbf843f839f5c51c7663e121d45329d27c15 not 
found: ID does not exist" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.040770 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:03 crc kubenswrapper[4888]: E1201 19:54:03.041219 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-metadata" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.041237 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-metadata" Dec 01 19:54:03 crc kubenswrapper[4888]: E1201 19:54:03.041261 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-log" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.041269 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-log" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.041469 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-metadata" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.041490 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" containerName="nova-metadata-log" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.042761 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.047765 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.048793 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.049570 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.152147 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dql9p\" (UniqueName: \"kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.152309 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.152347 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.152501 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.152699 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.253882 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.253932 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.253969 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.254030 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.254066 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dql9p\" (UniqueName: \"kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.255477 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.259076 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.260199 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.261065 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.281712 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dql9p\" (UniqueName: \"kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p\") pod \"nova-metadata-0\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.371543 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.884755 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:03 crc kubenswrapper[4888]: I1201 19:54:03.928063 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerStarted","Data":"4c7f9a6d018f754c560276ec48e029f9d219bf3f53d4f7b9a41e1a2e063535a0"} Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.463077 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc04488b-5fa6-47da-9e07-7f7e78aa446e" path="/var/lib/kubelet/pods/fc04488b-5fa6-47da-9e07-7f7e78aa446e/volumes" Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.857594 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.857899 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b148905b-79c9-4889-bf95-4727a495f95a" containerName="kube-state-metrics" containerID="cri-o://624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f" gracePeriod=30 Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.942012 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerStarted","Data":"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f"} Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.942051 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerStarted","Data":"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a"} Dec 01 19:54:04 crc kubenswrapper[4888]: I1201 19:54:04.979953 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.979928507 podStartE2EDuration="2.979928507s" podCreationTimestamp="2025-12-01 19:54:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:04.959948165 +0000 UTC m=+1244.830978089" watchObservedRunningTime="2025-12-01 19:54:04.979928507 +0000 UTC m=+1244.850958421" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.377638 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.436416 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnptg\" (UniqueName: \"kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg\") pod \"b148905b-79c9-4889-bf95-4727a495f95a\" (UID: \"b148905b-79c9-4889-bf95-4727a495f95a\") " Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.443229 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg" (OuterVolumeSpecName: "kube-api-access-wnptg") pod "b148905b-79c9-4889-bf95-4727a495f95a" (UID: "b148905b-79c9-4889-bf95-4727a495f95a"). InnerVolumeSpecName "kube-api-access-wnptg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.538885 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnptg\" (UniqueName: \"kubernetes.io/projected/b148905b-79c9-4889-bf95-4727a495f95a-kube-api-access-wnptg\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.951841 4888 generic.go:334] "Generic (PLEG): container finished" podID="b148905b-79c9-4889-bf95-4727a495f95a" containerID="624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f" exitCode=2 Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.951931 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.951889 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b148905b-79c9-4889-bf95-4727a495f95a","Type":"ContainerDied","Data":"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f"} Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.952010 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b148905b-79c9-4889-bf95-4727a495f95a","Type":"ContainerDied","Data":"e3d64e850300a6fdfe43ff103b511d13bd1f3e18c8aa0bb1178025d9aafae341"} Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.952033 4888 scope.go:117] "RemoveContainer" containerID="624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.955495 4888 generic.go:334] "Generic (PLEG): container finished" podID="2e1aa47b-6022-4533-91e7-e6108f9e7b63" containerID="68ff2eb8ba45911c903718565d6de350e4e1f06b3cd5124cb7196b39a5e528e3" exitCode=0 Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.955560 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q4d97" event={"ID":"2e1aa47b-6022-4533-91e7-e6108f9e7b63","Type":"ContainerDied","Data":"68ff2eb8ba45911c903718565d6de350e4e1f06b3cd5124cb7196b39a5e528e3"} Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.958028 4888 generic.go:334] "Generic (PLEG): container finished" podID="c5009fe9-671f-4c13-9c74-45d61ab93ca0" containerID="c213a0eb8684dcc9949d8b22ede26cfa17d51537ec00947b5b4eeaa54a5d484f" exitCode=0 Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.958125 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-659wd" event={"ID":"c5009fe9-671f-4c13-9c74-45d61ab93ca0","Type":"ContainerDied","Data":"c213a0eb8684dcc9949d8b22ede26cfa17d51537ec00947b5b4eeaa54a5d484f"} Dec 01 
19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.996174 4888 scope.go:117] "RemoveContainer" containerID="624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f" Dec 01 19:54:05 crc kubenswrapper[4888]: E1201 19:54:05.996803 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f\": container with ID starting with 624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f not found: ID does not exist" containerID="624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f" Dec 01 19:54:05 crc kubenswrapper[4888]: I1201 19:54:05.996892 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f"} err="failed to get container status \"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f\": rpc error: code = NotFound desc = could not find container \"624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f\": container with ID starting with 624693f3c54c1e10e1d32652cff0b4c5c4617b982b5bf3fbf71e26701ce8e74f not found: ID does not exist" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.029966 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.039738 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.053790 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: E1201 19:54:06.065597 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b148905b-79c9-4889-bf95-4727a495f95a" containerName="kube-state-metrics" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.065632 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b148905b-79c9-4889-bf95-4727a495f95a" containerName="kube-state-metrics" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.065923 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b148905b-79c9-4889-bf95-4727a495f95a" containerName="kube-state-metrics" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.066857 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.071074 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.071506 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.076920 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.167259 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.167327 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.259451 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4bc4\" (UniqueName: \"kubernetes.io/projected/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-api-access-b4bc4\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.259716 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.259812 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.259854 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.361492 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4bc4\" (UniqueName: \"kubernetes.io/projected/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-api-access-b4bc4\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.361777 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.361941 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.362035 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.366033 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.366359 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.366565 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.378667 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4bc4\" (UniqueName: \"kubernetes.io/projected/26b544b6-2ef6-40f8-8cf6-0834d6d7bc39-kube-api-access-b4bc4\") pod \"kube-state-metrics-0\" (UID: \"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39\") " pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.396335 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.427988 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.439871 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.439940 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.467010 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b148905b-79c9-4889-bf95-4727a495f95a" path="/var/lib/kubelet/pods/b148905b-79c9-4889-bf95-4727a495f95a/volumes" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.472852 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.481382 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.560959 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.561240 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="dnsmasq-dns" containerID="cri-o://fbd5c0ef319cc65056c2b8875f83836d53a7047c616e4eae7943fbcf89d5c736" gracePeriod=10 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.799064 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.799617 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-central-agent" containerID="cri-o://302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f" gracePeriod=30 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.800148 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="proxy-httpd" containerID="cri-o://3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76" gracePeriod=30 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.800218 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="sg-core" containerID="cri-o://dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7" gracePeriod=30 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.800275 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-notification-agent" containerID="cri-o://7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae" gracePeriod=30 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.907779 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.995492 4888 generic.go:334] "Generic (PLEG): container finished" 
podID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerID="fbd5c0ef319cc65056c2b8875f83836d53a7047c616e4eae7943fbcf89d5c736" exitCode=0 Dec 01 19:54:06 crc kubenswrapper[4888]: I1201 19:54:06.995606 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" event={"ID":"e325858d-ad0b-40ca-8cb1-5b2b14bdc908","Type":"ContainerDied","Data":"fbd5c0ef319cc65056c2b8875f83836d53a7047c616e4eae7943fbcf89d5c736"} Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.009395 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39","Type":"ContainerStarted","Data":"f520cc9d64eb9043f2f87cb724c3d1b28da74224234a04501aad91ca5de8f695"} Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.016636 4888 generic.go:334] "Generic (PLEG): container finished" podID="22837130-9717-4f10-80ad-99aeebda6fcf" containerID="3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76" exitCode=0 Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.016667 4888 generic.go:334] "Generic (PLEG): container finished" podID="22837130-9717-4f10-80ad-99aeebda6fcf" containerID="dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7" exitCode=2 Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.016712 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerDied","Data":"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76"} Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.016743 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerDied","Data":"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7"} Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.066526 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.073175 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.180890 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.182004 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.182055 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.182078 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.182100 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mg6w\" (UniqueName: \"kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.182126 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc\") pod \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\" (UID: \"e325858d-ad0b-40ca-8cb1-5b2b14bdc908\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.209317 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w" (OuterVolumeSpecName: "kube-api-access-2mg6w") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "kube-api-access-2mg6w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.254342 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.254416 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.260476 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.260708 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.263337 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config" (OuterVolumeSpecName: "config") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.283420 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.284876 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.284899 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.284910 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.284920 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.284931 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mg6w\" (UniqueName: \"kubernetes.io/projected/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-kube-api-access-2mg6w\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.302650 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e325858d-ad0b-40ca-8cb1-5b2b14bdc908" (UID: "e325858d-ad0b-40ca-8cb1-5b2b14bdc908"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.387877 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e325858d-ad0b-40ca-8cb1-5b2b14bdc908-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.599603 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-659wd" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.619579 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q4d97" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795246 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data\") pod \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795341 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkhkc\" (UniqueName: \"kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc\") pod \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795377 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle\") pod \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795415 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts\") pod \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\" (UID: \"2e1aa47b-6022-4533-91e7-e6108f9e7b63\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795444 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6gqs\" (UniqueName: \"kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs\") pod \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795477 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data\") pod \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.795502 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts\") pod \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.796131 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle\") pod \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\" (UID: \"c5009fe9-671f-4c13-9c74-45d61ab93ca0\") " Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.801448 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs" (OuterVolumeSpecName: "kube-api-access-q6gqs") pod "c5009fe9-671f-4c13-9c74-45d61ab93ca0" (UID: "c5009fe9-671f-4c13-9c74-45d61ab93ca0"). InnerVolumeSpecName "kube-api-access-q6gqs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.802743 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts" (OuterVolumeSpecName: "scripts") pod "c5009fe9-671f-4c13-9c74-45d61ab93ca0" (UID: "c5009fe9-671f-4c13-9c74-45d61ab93ca0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.818168 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts" (OuterVolumeSpecName: "scripts") pod "2e1aa47b-6022-4533-91e7-e6108f9e7b63" (UID: "2e1aa47b-6022-4533-91e7-e6108f9e7b63"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.819836 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc" (OuterVolumeSpecName: "kube-api-access-zkhkc") pod "2e1aa47b-6022-4533-91e7-e6108f9e7b63" (UID: "2e1aa47b-6022-4533-91e7-e6108f9e7b63"). InnerVolumeSpecName "kube-api-access-zkhkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.826386 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e1aa47b-6022-4533-91e7-e6108f9e7b63" (UID: "2e1aa47b-6022-4533-91e7-e6108f9e7b63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.829964 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data" (OuterVolumeSpecName: "config-data") pod "c5009fe9-671f-4c13-9c74-45d61ab93ca0" (UID: "c5009fe9-671f-4c13-9c74-45d61ab93ca0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.833063 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5009fe9-671f-4c13-9c74-45d61ab93ca0" (UID: "c5009fe9-671f-4c13-9c74-45d61ab93ca0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.867709 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data" (OuterVolumeSpecName: "config-data") pod "2e1aa47b-6022-4533-91e7-e6108f9e7b63" (UID: "2e1aa47b-6022-4533-91e7-e6108f9e7b63"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898438 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkhkc\" (UniqueName: \"kubernetes.io/projected/2e1aa47b-6022-4533-91e7-e6108f9e7b63-kube-api-access-zkhkc\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898482 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898493 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898503 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6gqs\" (UniqueName: \"kubernetes.io/projected/c5009fe9-671f-4c13-9c74-45d61ab93ca0-kube-api-access-q6gqs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898511 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898519 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898526 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5009fe9-671f-4c13-9c74-45d61ab93ca0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:07 crc kubenswrapper[4888]: I1201 19:54:07.898535 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e1aa47b-6022-4533-91e7-e6108f9e7b63-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.073465 4888 generic.go:334] "Generic (PLEG): container finished" podID="22837130-9717-4f10-80ad-99aeebda6fcf" containerID="302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f" exitCode=0 Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.073657 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerDied","Data":"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f"} Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.096466 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" event={"ID":"e325858d-ad0b-40ca-8cb1-5b2b14bdc908","Type":"ContainerDied","Data":"657a5dcef87ec0dafff1dee266fe9b720348c3ffe50e0076b5aebd7bde671c74"} Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.096557 4888 scope.go:117] "RemoveContainer" containerID="fbd5c0ef319cc65056c2b8875f83836d53a7047c616e4eae7943fbcf89d5c736" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.096855 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-ffc2d" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.107312 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 19:54:08 crc kubenswrapper[4888]: E1201 19:54:08.108004 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e1aa47b-6022-4533-91e7-e6108f9e7b63" containerName="nova-manage" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108018 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e1aa47b-6022-4533-91e7-e6108f9e7b63" containerName="nova-manage" Dec 01 19:54:08 crc kubenswrapper[4888]: E1201 19:54:08.108048 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5009fe9-671f-4c13-9c74-45d61ab93ca0" containerName="nova-cell1-conductor-db-sync" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108054 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5009fe9-671f-4c13-9c74-45d61ab93ca0" containerName="nova-cell1-conductor-db-sync" Dec 01 19:54:08 crc kubenswrapper[4888]: E1201 19:54:08.108061 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="dnsmasq-dns" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108068 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="dnsmasq-dns" Dec 01 19:54:08 crc kubenswrapper[4888]: E1201 19:54:08.108092 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="init" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108098 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="init" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108412 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5009fe9-671f-4c13-9c74-45d61ab93ca0" containerName="nova-cell1-conductor-db-sync" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108443 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e1aa47b-6022-4533-91e7-e6108f9e7b63" containerName="nova-manage" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.108460 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" containerName="dnsmasq-dns" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.109359 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.109376 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"26b544b6-2ef6-40f8-8cf6-0834d6d7bc39","Type":"ContainerStarted","Data":"8e97e334617a00b9b793034d8c5a9c425fb8d6db348bcde37848a60ec6316896"} Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.109466 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.117634 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-q4d97" event={"ID":"2e1aa47b-6022-4533-91e7-e6108f9e7b63","Type":"ContainerDied","Data":"664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8"} Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.117860 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="664794fdacac20dba2095da9263f9d208877ecb3744c7663d982eef2b300f0f8" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.117935 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-q4d97" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.119956 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-659wd" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.119870 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-659wd" event={"ID":"c5009fe9-671f-4c13-9c74-45d61ab93ca0","Type":"ContainerDied","Data":"397d39bfcbd43b49b01fe19f40af06b4199dd66595551888ab197d4dd5b74b16"} Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.121469 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="397d39bfcbd43b49b01fe19f40af06b4199dd66595551888ab197d4dd5b74b16" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.142809 4888 scope.go:117] "RemoveContainer" containerID="b6a12edf7de112c7ad23c01ac92952ce264ea669cb2219932028d46e0417ffb2" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.147946 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.155627 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.754650987 podStartE2EDuration="2.155601991s" podCreationTimestamp="2025-12-01 19:54:06 +0000 UTC" firstStartedPulling="2025-12-01 19:54:06.918877735 +0000 UTC m=+1246.789907649" lastFinishedPulling="2025-12-01 19:54:07.319828739 +0000 UTC m=+1247.190858653" observedRunningTime="2025-12-01 19:54:08.126379363 +0000 UTC m=+1247.997409297" watchObservedRunningTime="2025-12-01 19:54:08.155601991 +0000 UTC m=+1248.026631905" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.204605 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.227881 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-ffc2d"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.262385 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.262668 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-log" containerID="cri-o://7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7" gracePeriod=30 Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.263141 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-api" 
containerID="cri-o://d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e" gracePeriod=30 Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.282089 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.311015 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.311334 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.311502 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zg8n\" (UniqueName: \"kubernetes.io/projected/045ca115-d337-48ae-bfce-0df835c95bc8-kube-api-access-9zg8n\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.320422 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.320852 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-log" containerID="cri-o://8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" gracePeriod=30 Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.321681 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-metadata" containerID="cri-o://7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" gracePeriod=30 Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.371936 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.372513 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.413599 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.413764 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.413887 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9zg8n\" (UniqueName: \"kubernetes.io/projected/045ca115-d337-48ae-bfce-0df835c95bc8-kube-api-access-9zg8n\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.418233 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.418398 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/045ca115-d337-48ae-bfce-0df835c95bc8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.436269 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zg8n\" (UniqueName: \"kubernetes.io/projected/045ca115-d337-48ae-bfce-0df835c95bc8-kube-api-access-9zg8n\") pod \"nova-cell1-conductor-0\" (UID: \"045ca115-d337-48ae-bfce-0df835c95bc8\") " pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.462842 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e325858d-ad0b-40ca-8cb1-5b2b14bdc908" path="/var/lib/kubelet/pods/e325858d-ad0b-40ca-8cb1-5b2b14bdc908/volumes" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.479923 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.817902 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.925848 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dql9p\" (UniqueName: \"kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p\") pod \"8f7408bf-8993-445d-9163-a675ddef1b34\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.925983 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle\") pod \"8f7408bf-8993-445d-9163-a675ddef1b34\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.926029 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs\") pod \"8f7408bf-8993-445d-9163-a675ddef1b34\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.926108 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data\") pod \"8f7408bf-8993-445d-9163-a675ddef1b34\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.926166 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs\") pod \"8f7408bf-8993-445d-9163-a675ddef1b34\" (UID: \"8f7408bf-8993-445d-9163-a675ddef1b34\") " Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.944370 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs" (OuterVolumeSpecName: "logs") pod "8f7408bf-8993-445d-9163-a675ddef1b34" (UID: "8f7408bf-8993-445d-9163-a675ddef1b34"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.949149 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p" (OuterVolumeSpecName: "kube-api-access-dql9p") pod "8f7408bf-8993-445d-9163-a675ddef1b34" (UID: "8f7408bf-8993-445d-9163-a675ddef1b34"). InnerVolumeSpecName "kube-api-access-dql9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:08 crc kubenswrapper[4888]: I1201 19:54:08.999345 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data" (OuterVolumeSpecName: "config-data") pod "8f7408bf-8993-445d-9163-a675ddef1b34" (UID: "8f7408bf-8993-445d-9163-a675ddef1b34"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.026524 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f7408bf-8993-445d-9163-a675ddef1b34" (UID: "8f7408bf-8993-445d-9163-a675ddef1b34"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.027913 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dql9p\" (UniqueName: \"kubernetes.io/projected/8f7408bf-8993-445d-9163-a675ddef1b34-kube-api-access-dql9p\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.027933 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.027941 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7408bf-8993-445d-9163-a675ddef1b34-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.027952 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.071530 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8f7408bf-8993-445d-9163-a675ddef1b34" (UID: "8f7408bf-8993-445d-9163-a675ddef1b34"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.072416 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.132370 4888 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7408bf-8993-445d-9163-a675ddef1b34-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133676 4888 generic.go:334] "Generic (PLEG): container finished" podID="8f7408bf-8993-445d-9163-a675ddef1b34" containerID="7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" exitCode=0 Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133706 4888 generic.go:334] "Generic (PLEG): container finished" podID="8f7408bf-8993-445d-9163-a675ddef1b34" containerID="8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" exitCode=143 Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133728 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133735 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerDied","Data":"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f"} Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133794 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerDied","Data":"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a"} Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133806 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8f7408bf-8993-445d-9163-a675ddef1b34","Type":"ContainerDied","Data":"4c7f9a6d018f754c560276ec48e029f9d219bf3f53d4f7b9a41e1a2e063535a0"} Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.133822 4888 scope.go:117] "RemoveContainer" containerID="7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.148383 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"045ca115-d337-48ae-bfce-0df835c95bc8","Type":"ContainerStarted","Data":"0b67cd196a5789a1906124ee2ce0ddfb4332ab882ba99816b487869b98aa3a60"} Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.156508 4888 generic.go:334] "Generic (PLEG): container finished" podID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerID="7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7" exitCode=143 Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.156667 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerDied","Data":"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7"} Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.157018 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" containerName="nova-scheduler-scheduler" containerID="cri-o://a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" gracePeriod=30 Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.184885 4888 scope.go:117] "RemoveContainer" containerID="8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.194314 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.208685 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.214080 4888 scope.go:117] "RemoveContainer" containerID="7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" Dec 01 19:54:09 crc kubenswrapper[4888]: E1201 19:54:09.215662 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f\": container with ID starting with 7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f not found: ID does not exist" containerID="7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.215725 4888 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f"} err="failed to get container status \"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f\": rpc error: code = NotFound desc = could not find container \"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f\": container with ID starting with 7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f not found: ID does not exist" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.215757 4888 scope.go:117] "RemoveContainer" containerID="8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" Dec 01 19:54:09 crc kubenswrapper[4888]: E1201 19:54:09.219764 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a\": container with ID starting with 8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a not found: ID does not exist" containerID="8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.219806 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a"} err="failed to get container status \"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a\": rpc error: code = NotFound desc = could not find container \"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a\": container with ID starting with 8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a not found: ID does not exist" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.219834 4888 scope.go:117] "RemoveContainer" containerID="7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.220393 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f"} err="failed to get container status \"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f\": rpc error: code = NotFound desc = could not find container \"7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f\": container with ID starting with 7ce40587315ade93f0fb69b3f97592fe99d0097bc2ca332f13829a096f512a1f not found: ID does not exist" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.220433 4888 scope.go:117] "RemoveContainer" containerID="8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.220647 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a"} err="failed to get container status \"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a\": rpc error: code = NotFound desc = could not find container \"8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a\": container with ID starting with 8870f2fbe0a676625d4b760a73c4bb03b78b8826751ae5b04a72aea3e223243a not found: ID does not exist" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.227608 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:09 crc kubenswrapper[4888]: E1201 19:54:09.228038 4888 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-metadata" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.228055 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-metadata" Dec 01 19:54:09 crc kubenswrapper[4888]: E1201 19:54:09.228084 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-log" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.228092 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-log" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.228318 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-log" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.228344 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" containerName="nova-metadata-metadata" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.229741 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.232476 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.232626 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.239076 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.351536 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.351594 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.351655 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf8gb\" (UniqueName: \"kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.351696 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.351738 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: E1201 19:54:09.388083 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f7408bf_8993_445d_9163_a675ddef1b34.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22837130_9717_4f10_80ad_99aeebda6fcf.slice/crio-7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22837130_9717_4f10_80ad_99aeebda6fcf.slice/crio-conmon-7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.453815 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.454113 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf8gb\" (UniqueName: \"kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.454144 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.454169 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.454283 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.454644 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.459032 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.459468 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.466675 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.475106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf8gb\" (UniqueName: \"kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb\") pod \"nova-metadata-0\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.551378 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.717993 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.865514 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.865550 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.865669 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.865772 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.866153 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.865807 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg2xw\" (UniqueName: \"kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.866487 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.866818 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd\") pod \"22837130-9717-4f10-80ad-99aeebda6fcf\" (UID: \"22837130-9717-4f10-80ad-99aeebda6fcf\") " Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.867280 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.867453 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.867468 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/22837130-9717-4f10-80ad-99aeebda6fcf-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.872999 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts" (OuterVolumeSpecName: "scripts") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.873535 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw" (OuterVolumeSpecName: "kube-api-access-hg2xw") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "kube-api-access-hg2xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.897560 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.956766 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.968863 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.968890 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.968901 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg2xw\" (UniqueName: \"kubernetes.io/projected/22837130-9717-4f10-80ad-99aeebda6fcf-kube-api-access-hg2xw\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.968911 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:09 crc kubenswrapper[4888]: I1201 19:54:09.993014 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data" (OuterVolumeSpecName: "config-data") pod "22837130-9717-4f10-80ad-99aeebda6fcf" (UID: "22837130-9717-4f10-80ad-99aeebda6fcf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.062284 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.070378 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22837130-9717-4f10-80ad-99aeebda6fcf-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.167344 4888 generic.go:334] "Generic (PLEG): container finished" podID="22837130-9717-4f10-80ad-99aeebda6fcf" containerID="7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae" exitCode=0 Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.167433 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerDied","Data":"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae"} Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.167476 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"22837130-9717-4f10-80ad-99aeebda6fcf","Type":"ContainerDied","Data":"862e6e11fbaaa8e2e4b1ac2725d2a863cd6f29dd269a3ebeda2cd4e2ee5d4ee1"} Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.167499 4888 scope.go:117] "RemoveContainer" containerID="3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.167644 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.177482 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"045ca115-d337-48ae-bfce-0df835c95bc8","Type":"ContainerStarted","Data":"aa7d0b3a40564e72020ee75d9bc90cd4cf0948b72af6e2a7c4d8711d949bc9ea"} Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.178646 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.181314 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerStarted","Data":"d302cb69a31b675b5ba6a9f916437ecec3d1e69b389d71f2a793eed80a93b636"} Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.195519 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.195489359 podStartE2EDuration="2.195489359s" podCreationTimestamp="2025-12-01 19:54:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:10.19296746 +0000 UTC m=+1250.063997374" watchObservedRunningTime="2025-12-01 19:54:10.195489359 +0000 UTC m=+1250.066519273" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.214015 4888 scope.go:117] "RemoveContainer" containerID="dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.237886 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.250299 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.258066 4888 scope.go:117] "RemoveContainer" containerID="7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.262357 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.264143 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-notification-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.264161 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-notification-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.264227 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="proxy-httpd" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.264234 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="proxy-httpd" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.264270 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="sg-core" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.264278 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="sg-core" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.264329 4888 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-central-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.264335 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-central-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.265983 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-central-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.266020 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="ceilometer-notification-agent" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.266039 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="sg-core" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.266063 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" containerName="proxy-httpd" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.299623 4888 scope.go:117] "RemoveContainer" containerID="302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.303097 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.306511 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.306756 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.306946 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.335207 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.340887 4888 scope.go:117] "RemoveContainer" containerID="3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.341489 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76\": container with ID starting with 3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76 not found: ID does not exist" containerID="3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.341556 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76"} err="failed to get container status \"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76\": rpc error: code = NotFound desc = could not find container \"3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76\": container with ID starting with 3196e407006fb2eb9a8d273cf7cf15b28dab5a013259b6218bb83afd31c3ce76 not found: ID does not exist" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.341600 4888 scope.go:117] "RemoveContainer" containerID="dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7" Dec 01 19:54:10 crc 
kubenswrapper[4888]: E1201 19:54:10.344545 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7\": container with ID starting with dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7 not found: ID does not exist" containerID="dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.344607 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7"} err="failed to get container status \"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7\": rpc error: code = NotFound desc = could not find container \"dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7\": container with ID starting with dce8ebbaafd6024577739296072418c73ebecc40d230b42323c78945f12123e7 not found: ID does not exist" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.344644 4888 scope.go:117] "RemoveContainer" containerID="7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.345060 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae\": container with ID starting with 7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae not found: ID does not exist" containerID="7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.345117 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae"} err="failed to get container status \"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae\": rpc error: code = NotFound desc = could not find container \"7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae\": container with ID starting with 7e3fee816c219049a6d0eb292a05fcc2a6c2a9af321a1817b5e0d600194dd0ae not found: ID does not exist" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.345157 4888 scope.go:117] "RemoveContainer" containerID="302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f" Dec 01 19:54:10 crc kubenswrapper[4888]: E1201 19:54:10.345525 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f\": container with ID starting with 302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f not found: ID does not exist" containerID="302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.345556 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f"} err="failed to get container status \"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f\": rpc error: code = NotFound desc = could not find container \"302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f\": container with ID starting with 302b929ed34e7f88ba6c3602b0851074389c4235aec6f0adadc9991c1b18749f not found: ID does not exist" Dec 01 19:54:10 crc kubenswrapper[4888]: 
I1201 19:54:10.398593 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.398640 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.398730 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.398755 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.398788 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.398929 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.399006 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.399089 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5vtc\" (UniqueName: \"kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.478603 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22837130-9717-4f10-80ad-99aeebda6fcf" path="/var/lib/kubelet/pods/22837130-9717-4f10-80ad-99aeebda6fcf/volumes" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.479569 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f7408bf-8993-445d-9163-a675ddef1b34" path="/var/lib/kubelet/pods/8f7408bf-8993-445d-9163-a675ddef1b34/volumes" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.510991 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511058 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511225 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511297 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511551 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511713 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511910 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.511990 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.512032 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.512086 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5vtc\" (UniqueName: \"kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.516021 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.516476 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.517375 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.517886 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.519248 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.532110 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5vtc\" (UniqueName: \"kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc\") pod \"ceilometer-0\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " pod="openstack/ceilometer-0" Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.636703 4888 util.go:30] "No sandbox for pod can be found. 
Dec 01 19:54:10 crc kubenswrapper[4888]: I1201 19:54:10.636703 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:11 crc kubenswrapper[4888]: I1201 19:54:11.100823 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:11 crc kubenswrapper[4888]: I1201 19:54:11.201392 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerStarted","Data":"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007"} Dec 01 19:54:11 crc kubenswrapper[4888]: I1201 19:54:11.201454 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerStarted","Data":"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47"} Dec 01 19:54:11 crc kubenswrapper[4888]: I1201 19:54:11.208258 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerStarted","Data":"3f56b547a568572a93e33ba3c09a496c7ed11e7bd87c38c46463a44e735b7681"} Dec 01 19:54:11 crc kubenswrapper[4888]: I1201 19:54:11.216856 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.216837401 podStartE2EDuration="2.216837401s" podCreationTimestamp="2025-12-01 19:54:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:11.216723138 +0000 UTC m=+1251.087753072" watchObservedRunningTime="2025-12-01 19:54:11.216837401 +0000 UTC m=+1251.087867315" Dec 01 19:54:11 crc kubenswrapper[4888]: E1201 19:54:11.444307 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:11 crc kubenswrapper[4888]: E1201 19:54:11.445812 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:11 crc kubenswrapper[4888]: E1201 19:54:11.447499 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:11 crc kubenswrapper[4888]: E1201 19:54:11.447549 4888 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" containerName="nova-scheduler-scheduler" Dec 01 19:54:12 crc kubenswrapper[4888]: I1201 19:54:12.217495 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerStarted","Data":"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52"}
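The three ExecSync failures above come from nova-scheduler-0's readiness probe: the kubelet execs /usr/bin/pgrep inside the container while CRI-O is already stopping it, so the runtime cannot register an exec PID and the probe errors rather than returning a clean pass/fail; the pod is torn down moments later, so the errors are transient. A probe of that shape in client-go types (period and threshold values are assumptions; the log does not record them):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Readiness probe running the same command the kubelet logs in the
	// ExecSync errors above. Timing values are illustrative assumptions.
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			Exec: &corev1.ExecAction{
				Command: []string{"/usr/bin/pgrep", "-r", "DRST", "nova-scheduler"},
			},
		},
		PeriodSeconds:    5,
		FailureThreshold: 3,
	}
	fmt.Println(probe.Exec.Command)
}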
Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.238710 4888 generic.go:334] "Generic (PLEG): container finished" podID="0ff57d03-1df0-469e-a02e-257696b7d646" containerID="a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" exitCode=0 Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.238809 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0ff57d03-1df0-469e-a02e-257696b7d646","Type":"ContainerDied","Data":"a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771"} Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.268740 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerStarted","Data":"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2"} Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.578348 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.634868 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle\") pod \"0ff57d03-1df0-469e-a02e-257696b7d646\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.635017 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data\") pod \"0ff57d03-1df0-469e-a02e-257696b7d646\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.635146 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pznr\" (UniqueName: \"kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr\") pod \"0ff57d03-1df0-469e-a02e-257696b7d646\" (UID: \"0ff57d03-1df0-469e-a02e-257696b7d646\") " Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.657439 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr" (OuterVolumeSpecName: "kube-api-access-7pznr") pod "0ff57d03-1df0-469e-a02e-257696b7d646" (UID: "0ff57d03-1df0-469e-a02e-257696b7d646"). InnerVolumeSpecName "kube-api-access-7pznr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.682823 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ff57d03-1df0-469e-a02e-257696b7d646" (UID: "0ff57d03-1df0-469e-a02e-257696b7d646"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.685995 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data" (OuterVolumeSpecName: "config-data") pod "0ff57d03-1df0-469e-a02e-257696b7d646" (UID: "0ff57d03-1df0-469e-a02e-257696b7d646"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.738683 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.738717 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ff57d03-1df0-469e-a02e-257696b7d646-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:13 crc kubenswrapper[4888]: I1201 19:54:13.738727 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pznr\" (UniqueName: \"kubernetes.io/projected/0ff57d03-1df0-469e-a02e-257696b7d646-kube-api-access-7pznr\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.056143 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.151041 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs\") pod \"53a18ce7-60e1-4772-8e61-b3131be3a748\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.151254 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szvh4\" (UniqueName: \"kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4\") pod \"53a18ce7-60e1-4772-8e61-b3131be3a748\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.151419 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data\") pod \"53a18ce7-60e1-4772-8e61-b3131be3a748\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.151452 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle\") pod \"53a18ce7-60e1-4772-8e61-b3131be3a748\" (UID: \"53a18ce7-60e1-4772-8e61-b3131be3a748\") " Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.152657 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs" (OuterVolumeSpecName: "logs") pod "53a18ce7-60e1-4772-8e61-b3131be3a748" (UID: "53a18ce7-60e1-4772-8e61-b3131be3a748"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.156244 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4" (OuterVolumeSpecName: "kube-api-access-szvh4") pod "53a18ce7-60e1-4772-8e61-b3131be3a748" (UID: "53a18ce7-60e1-4772-8e61-b3131be3a748"). InnerVolumeSpecName "kube-api-access-szvh4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.181153 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data" (OuterVolumeSpecName: "config-data") pod "53a18ce7-60e1-4772-8e61-b3131be3a748" (UID: "53a18ce7-60e1-4772-8e61-b3131be3a748"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.187468 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53a18ce7-60e1-4772-8e61-b3131be3a748" (UID: "53a18ce7-60e1-4772-8e61-b3131be3a748"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.254528 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szvh4\" (UniqueName: \"kubernetes.io/projected/53a18ce7-60e1-4772-8e61-b3131be3a748-kube-api-access-szvh4\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.254608 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.254630 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a18ce7-60e1-4772-8e61-b3131be3a748-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.254650 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a18ce7-60e1-4772-8e61-b3131be3a748-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.282008 4888 generic.go:334] "Generic (PLEG): container finished" podID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerID="d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e" exitCode=0 Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.282099 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerDied","Data":"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e"} Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.282119 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.282149 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"53a18ce7-60e1-4772-8e61-b3131be3a748","Type":"ContainerDied","Data":"e74755e0d5616cbaaa86154e959cf873ba3270b9a653b1d118c582c7a9068aac"} Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.282177 4888 scope.go:117] "RemoveContainer" containerID="d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.286238 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerStarted","Data":"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9"} Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.289062 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0ff57d03-1df0-469e-a02e-257696b7d646","Type":"ContainerDied","Data":"476ed79a8265681bf2cb312afc96e7db55b0fe8ad0f84267e62e4b56a78ab001"} Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.289146 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.327728 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.328252 4888 scope.go:117] "RemoveContainer" containerID="7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.344171 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.355107 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.368240 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.370595 4888 scope.go:117] "RemoveContainer" containerID="d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e" Dec 01 19:54:14 crc kubenswrapper[4888]: E1201 19:54:14.372085 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e\": container with ID starting with d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e not found: ID does not exist" containerID="d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.372119 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e"} err="failed to get container status \"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e\": rpc error: code = NotFound desc = could not find container \"d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e\": container with ID starting with d906624335a544ddecf3aa479065ea61341d34e4593c3b79cdd829b7000cd03e not found: ID does not exist" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.372143 4888 scope.go:117] "RemoveContainer" containerID="7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7" Dec 01 19:54:14 crc kubenswrapper[4888]: E1201 
19:54:14.373006 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7\": container with ID starting with 7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7 not found: ID does not exist" containerID="7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.373047 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7"} err="failed to get container status \"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7\": rpc error: code = NotFound desc = could not find container \"7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7\": container with ID starting with 7f6f47e77d1fd4ef8fe6299188bb54c518463c7574772246815ebad72a5fc5f7 not found: ID does not exist" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.373076 4888 scope.go:117] "RemoveContainer" containerID="a3d977c12e1b932c72a6da026bf835f04cd9675a3fb3bdf00ebb604d14613771" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.384332 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: E1201 19:54:14.384874 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-log" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.384890 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-log" Dec 01 19:54:14 crc kubenswrapper[4888]: E1201 19:54:14.384906 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-api" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.384913 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-api" Dec 01 19:54:14 crc kubenswrapper[4888]: E1201 19:54:14.384933 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" containerName="nova-scheduler-scheduler" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.384939 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" containerName="nova-scheduler-scheduler" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.385120 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-api" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.385140 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" containerName="nova-api-log" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.385158 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" containerName="nova-scheduler-scheduler" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.386361 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.389005 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.395241 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.433337 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.435338 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.441925 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.450368 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.467490 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ff57d03-1df0-469e-a02e-257696b7d646" path="/var/lib/kubelet/pods/0ff57d03-1df0-469e-a02e-257696b7d646/volumes" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.468370 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53a18ce7-60e1-4772-8e61-b3131be3a748" path="/var/lib/kubelet/pods/53a18ce7-60e1-4772-8e61-b3131be3a748/volumes" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.486775 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.487054 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvc8q\" (UniqueName: \"kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.487091 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.487114 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.551029 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.551425 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.589980 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590062 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590136 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hr6p\" (UniqueName: \"kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590209 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590367 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvc8q\" (UniqueName: \"kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590408 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.590425 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.591154 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.594552 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.597397 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.620941 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvc8q\" 
(UniqueName: \"kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q\") pod \"nova-api-0\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.692358 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.692415 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.692441 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hr6p\" (UniqueName: \"kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.696241 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.697944 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.714526 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hr6p\" (UniqueName: \"kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p\") pod \"nova-scheduler-0\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") " pod="openstack/nova-scheduler-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.725688 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:14 crc kubenswrapper[4888]: I1201 19:54:14.818176 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.215419 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.306455 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerStarted","Data":"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452"} Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.307867 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.310571 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.310733 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerStarted","Data":"55052c1519e57075630ac3f8d6c9f4a753a2a52bb0244da31da91cfc5328792a"} Dec 01 19:54:15 crc kubenswrapper[4888]: W1201 19:54:15.318876 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd777d06_4cc8_489b_a2bb_7d789f6ddeb6.slice/crio-650ad5b8ffdab86493a9cfb2d1c68829e79c5c17b11b0112c8ad919367ab71b1 WatchSource:0}: Error finding container 650ad5b8ffdab86493a9cfb2d1c68829e79c5c17b11b0112c8ad919367ab71b1: Status 404 returned error can't find the container with id 650ad5b8ffdab86493a9cfb2d1c68829e79c5c17b11b0112c8ad919367ab71b1 Dec 01 19:54:15 crc kubenswrapper[4888]: I1201 19:54:15.336991 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.4214745469999999 podStartE2EDuration="5.336971642s" podCreationTimestamp="2025-12-01 19:54:10 +0000 UTC" firstStartedPulling="2025-12-01 19:54:11.107373015 +0000 UTC m=+1250.978402929" lastFinishedPulling="2025-12-01 19:54:15.02287011 +0000 UTC m=+1254.893900024" observedRunningTime="2025-12-01 19:54:15.327241443 +0000 UTC m=+1255.198271377" watchObservedRunningTime="2025-12-01 19:54:15.336971642 +0000 UTC m=+1255.208001576" Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.339825 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerStarted","Data":"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552"} Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.340773 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerStarted","Data":"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433"} Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.342702 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6","Type":"ContainerStarted","Data":"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"} Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.342762 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6","Type":"ContainerStarted","Data":"650ad5b8ffdab86493a9cfb2d1c68829e79c5c17b11b0112c8ad919367ab71b1"} Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.371111 4888 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.371094878 podStartE2EDuration="2.371094878s" podCreationTimestamp="2025-12-01 19:54:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:16.362375937 +0000 UTC m=+1256.233405871" watchObservedRunningTime="2025-12-01 19:54:16.371094878 +0000 UTC m=+1256.242124792" Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.392326 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.392303644 podStartE2EDuration="2.392303644s" podCreationTimestamp="2025-12-01 19:54:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:16.385301771 +0000 UTC m=+1256.256331685" watchObservedRunningTime="2025-12-01 19:54:16.392303644 +0000 UTC m=+1256.263333558" Dec 01 19:54:16 crc kubenswrapper[4888]: I1201 19:54:16.435108 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 19:54:18 crc kubenswrapper[4888]: I1201 19:54:18.519659 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 01 19:54:19 crc kubenswrapper[4888]: I1201 19:54:19.551459 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 19:54:19 crc kubenswrapper[4888]: I1201 19:54:19.551512 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 19:54:19 crc kubenswrapper[4888]: E1201 19:54:19.658856 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-conmon-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd072d085_14a2_4137_a9a1_29882ab4fe55.slice/crio-b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3.scope\": RecentStats: unable to find data in memory cache]" Dec 01 19:54:19 crc kubenswrapper[4888]: I1201 19:54:19.818816 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 19:54:20 crc kubenswrapper[4888]: I1201 19:54:20.037898 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:54:20 crc kubenswrapper[4888]: I1201 19:54:20.037969 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
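In the pod_startup_latency_tracker entries above, podStartE2EDuration is observedRunningTime minus podCreationTimestamp, while podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling); when nothing was pulled the pull timestamps keep their zero value (0001-01-01) and the two durations coincide, as for nova-api-0 and nova-scheduler-0 here. The earlier ceilometer-0 entry checks out the same way: 5.336971642s minus a 3.915497095s pull window gives the logged 1.421474547s. A quick verification in Go:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the ceilometer-0 startup-latency entry.
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	firstPull := parse("2025-12-01 19:54:11.107373015 +0000 UTC")
	lastPull := parse("2025-12-01 19:54:15.02287011 +0000 UTC")

	e2e := 5336971642 * time.Nanosecond // podStartE2EDuration from the log
	slo := e2e - lastPull.Sub(firstPull)
	fmt.Println(slo.Seconds()) // 1.421474547, matching podStartSLOduration
}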
probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:20 crc kubenswrapper[4888]: I1201 19:54:20.563315 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:24 crc kubenswrapper[4888]: I1201 19:54:24.726717 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:24 crc kubenswrapper[4888]: I1201 19:54:24.727291 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:24 crc kubenswrapper[4888]: I1201 19:54:24.818860 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 19:54:24 crc kubenswrapper[4888]: I1201 19:54:24.850014 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 19:54:25 crc kubenswrapper[4888]: I1201 19:54:25.451762 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 19:54:25 crc kubenswrapper[4888]: I1201 19:54:25.810396 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:25 crc kubenswrapper[4888]: I1201 19:54:25.810426 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 19:54:29 crc kubenswrapper[4888]: I1201 19:54:29.557575 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 19:54:29 crc kubenswrapper[4888]: I1201 19:54:29.558305 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 19:54:29 crc kubenswrapper[4888]: I1201 19:54:29.562937 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 19:54:29 crc kubenswrapper[4888]: I1201 19:54:29.563628 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.332022 4888 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.332022 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.462492 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data\") pod \"15b0cb64-b048-4160-aca0-0a1fc5560aef\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.462621 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mm8sg\" (UniqueName: \"kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg\") pod \"15b0cb64-b048-4160-aca0-0a1fc5560aef\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.462714 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle\") pod \"15b0cb64-b048-4160-aca0-0a1fc5560aef\" (UID: \"15b0cb64-b048-4160-aca0-0a1fc5560aef\") " Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.467692 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg" (OuterVolumeSpecName: "kube-api-access-mm8sg") pod "15b0cb64-b048-4160-aca0-0a1fc5560aef" (UID: "15b0cb64-b048-4160-aca0-0a1fc5560aef"). InnerVolumeSpecName "kube-api-access-mm8sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.493399 4888 generic.go:334] "Generic (PLEG): container finished" podID="15b0cb64-b048-4160-aca0-0a1fc5560aef" containerID="1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5" exitCode=137 Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.493440 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15b0cb64-b048-4160-aca0-0a1fc5560aef","Type":"ContainerDied","Data":"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5"} Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.493467 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"15b0cb64-b048-4160-aca0-0a1fc5560aef","Type":"ContainerDied","Data":"d58ee47b404a78797def802234fef200d3167c2ed35509e17ed80faf465530f8"} Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.493483 4888 scope.go:117] "RemoveContainer" containerID="1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.493587 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.496505 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15b0cb64-b048-4160-aca0-0a1fc5560aef" (UID: "15b0cb64-b048-4160-aca0-0a1fc5560aef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.497873 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data" (OuterVolumeSpecName: "config-data") pod "15b0cb64-b048-4160-aca0-0a1fc5560aef" (UID: "15b0cb64-b048-4160-aca0-0a1fc5560aef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.565626 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.565662 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mm8sg\" (UniqueName: \"kubernetes.io/projected/15b0cb64-b048-4160-aca0-0a1fc5560aef-kube-api-access-mm8sg\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.565672 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b0cb64-b048-4160-aca0-0a1fc5560aef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.568484 4888 scope.go:117] "RemoveContainer" containerID="1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5" Dec 01 19:54:32 crc kubenswrapper[4888]: E1201 19:54:32.568833 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5\": container with ID starting with 1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5 not found: ID does not exist" containerID="1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.568877 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5"} err="failed to get container status \"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5\": rpc error: code = NotFound desc = could not find container \"1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5\": container with ID starting with 1168e69d35cdf068ce67c57d151eb4e38969b3b85afa5eb6f2cb2f1fb0fa11d5 not found: ID does not exist" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.831352 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.840217 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.866726 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 19:54:32 crc kubenswrapper[4888]: E1201 19:54:32.867212 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b0cb64-b048-4160-aca0-0a1fc5560aef" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.867227 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b0cb64-b048-4160-aca0-0a1fc5560aef" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.867456 4888 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="15b0cb64-b048-4160-aca0-0a1fc5560aef" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.868138 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.871926 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.872774 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.872943 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.896334 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.973818 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.973897 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.974031 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.974108 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:32 crc kubenswrapper[4888]: I1201 19:54:32.974212 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l98gw\" (UniqueName: \"kubernetes.io/projected/086e1f96-58d1-42ab-a745-839383b65b7e-kube-api-access-l98gw\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.076011 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.076073 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.076101 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.076149 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l98gw\" (UniqueName: \"kubernetes.io/projected/086e1f96-58d1-42ab-a745-839383b65b7e-kube-api-access-l98gw\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.076271 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.080786 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.081253 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.082768 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.086880 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/086e1f96-58d1-42ab-a745-839383b65b7e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.092788 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l98gw\" (UniqueName: \"kubernetes.io/projected/086e1f96-58d1-42ab-a745-839383b65b7e-kube-api-access-l98gw\") pod \"nova-cell1-novncproxy-0\" (UID: \"086e1f96-58d1-42ab-a745-839383b65b7e\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.235024 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:33 crc kubenswrapper[4888]: I1201 19:54:33.650212 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 19:54:33 crc kubenswrapper[4888]: W1201 19:54:33.658739 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod086e1f96_58d1_42ab_a745_839383b65b7e.slice/crio-e481b9170bc83818b3a9c06c3b6cb22ed1871e3115fde509fa6fa382cff2ea13 WatchSource:0}: Error finding container e481b9170bc83818b3a9c06c3b6cb22ed1871e3115fde509fa6fa382cff2ea13: Status 404 returned error can't find the container with id e481b9170bc83818b3a9c06c3b6cb22ed1871e3115fde509fa6fa382cff2ea13 Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.463686 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15b0cb64-b048-4160-aca0-0a1fc5560aef" path="/var/lib/kubelet/pods/15b0cb64-b048-4160-aca0-0a1fc5560aef/volumes" Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.521423 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"086e1f96-58d1-42ab-a745-839383b65b7e","Type":"ContainerStarted","Data":"d7b3021c4b87b696e10df5d573502c18840584d850d9897165bfb510a09bf4bf"} Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.521481 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"086e1f96-58d1-42ab-a745-839383b65b7e","Type":"ContainerStarted","Data":"e481b9170bc83818b3a9c06c3b6cb22ed1871e3115fde509fa6fa382cff2ea13"} Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.545822 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.545806233 podStartE2EDuration="2.545806233s" podCreationTimestamp="2025-12-01 19:54:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:34.542009498 +0000 UTC m=+1274.413039412" watchObservedRunningTime="2025-12-01 19:54:34.545806233 +0000 UTC m=+1274.416836147" Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.730013 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.730578 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.730618 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 19:54:34 crc kubenswrapper[4888]: I1201 19:54:34.733398 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.534747 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.538935 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.829447 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"] Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.831833 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.865232 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"] Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.935703 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.935826 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.935916 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.935949 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.936015 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:35 crc kubenswrapper[4888]: I1201 19:54:35.936065 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lp8z\" (UniqueName: \"kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037490 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037554 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037659 4888 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037702 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lp8z\" (UniqueName: \"kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037737 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.037814 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.038737 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.039291 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.040701 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.040704 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.040798 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.058849 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lp8z\" (UniqueName: 
\"kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z\") pod \"dnsmasq-dns-89c5cd4d5-r9fhg\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") " pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.161302 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:36 crc kubenswrapper[4888]: I1201 19:54:36.674750 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"] Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.553901 4888 generic.go:334] "Generic (PLEG): container finished" podID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerID="e16814a1eaad7189834808f45f90b3c468825f4aeaa6dff2e401497faa49d023" exitCode=0 Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.554025 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" event={"ID":"06b8160e-f040-4c38-a13a-a5ec612f57ef","Type":"ContainerDied","Data":"e16814a1eaad7189834808f45f90b3c468825f4aeaa6dff2e401497faa49d023"} Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.554316 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" event={"ID":"06b8160e-f040-4c38-a13a-a5ec612f57ef","Type":"ContainerStarted","Data":"b20a021394e711aee9e9770d7209a77a69718ebd014c42127c6c9092e1f50fc5"} Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.996992 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.997522 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-central-agent" containerID="cri-o://01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" gracePeriod=30 Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.997615 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="sg-core" containerID="cri-o://2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" gracePeriod=30 Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.997646 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-notification-agent" containerID="cri-o://e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" gracePeriod=30 Dec 01 19:54:37 crc kubenswrapper[4888]: I1201 19:54:37.997645 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="proxy-httpd" containerID="cri-o://463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" gracePeriod=30 Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.008976 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.194:3000/\": read tcp 10.217.0.2:35040->10.217.0.194:3000: read: connection reset by peer" Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.236021 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:38 crc 
kubenswrapper[4888]: I1201 19:54:38.286331 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.583992 4888 generic.go:334] "Generic (PLEG): container finished" podID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerID="463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" exitCode=0 Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.584295 4888 generic.go:334] "Generic (PLEG): container finished" podID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerID="2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" exitCode=2 Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.584064 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerDied","Data":"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452"} Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.584339 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerDied","Data":"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9"} Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.586709 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" event={"ID":"06b8160e-f040-4c38-a13a-a5ec612f57ef","Type":"ContainerStarted","Data":"c5f9d62494b09a9b8307e38b2810158fcc13d06e285f2a6b913d1f7fd4abcbcb"} Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.586764 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.587289 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-log" containerID="cri-o://9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433" gracePeriod=30 Dec 01 19:54:38 crc kubenswrapper[4888]: I1201 19:54:38.587396 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-api" containerID="cri-o://29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552" gracePeriod=30 Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.425062 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.457155 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" podStartSLOduration=4.457133155 podStartE2EDuration="4.457133155s" podCreationTimestamp="2025-12-01 19:54:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:38.651418783 +0000 UTC m=+1278.522448707" watchObservedRunningTime="2025-12-01 19:54:39.457133155 +0000 UTC m=+1279.328163069" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507154 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5vtc\" (UniqueName: \"kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507238 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507274 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507309 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507454 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507541 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507586 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507620 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts\") pod \"7ec8ec36-8e38-4494-ac51-6998b16519da\" (UID: \"7ec8ec36-8e38-4494-ac51-6998b16519da\") " Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507737 4888 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.507796 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.508075 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.508101 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ec8ec36-8e38-4494-ac51-6998b16519da-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.514346 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts" (OuterVolumeSpecName: "scripts") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.515293 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc" (OuterVolumeSpecName: "kube-api-access-h5vtc") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "kube-api-access-h5vtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.545939 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.577305 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.605565 4888 generic.go:334] "Generic (PLEG): container finished" podID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerID="9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433" exitCode=143 Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.605632 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerDied","Data":"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433"} Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609345 4888 generic.go:334] "Generic (PLEG): container finished" podID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerID="e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" exitCode=0 Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609371 4888 generic.go:334] "Generic (PLEG): container finished" podID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerID="01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" exitCode=0 Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609714 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609778 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerDied","Data":"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2"} Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609797 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609826 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerDied","Data":"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52"} Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609842 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7ec8ec36-8e38-4494-ac51-6998b16519da","Type":"ContainerDied","Data":"3f56b547a568572a93e33ba3c09a496c7ed11e7bd87c38c46463a44e735b7681"} Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.609862 4888 scope.go:117] "RemoveContainer" containerID="463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.610666 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.610856 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5vtc\" (UniqueName: \"kubernetes.io/projected/7ec8ec36-8e38-4494-ac51-6998b16519da-kube-api-access-h5vtc\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.610868 4888 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.622690 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.637763 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data" (OuterVolumeSpecName: "config-data") pod "7ec8ec36-8e38-4494-ac51-6998b16519da" (UID: "7ec8ec36-8e38-4494-ac51-6998b16519da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.708722 4888 scope.go:117] "RemoveContainer" containerID="2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.712785 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.712826 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec8ec36-8e38-4494-ac51-6998b16519da-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.732714 4888 scope.go:117] "RemoveContainer" containerID="e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.750946 4888 scope.go:117] "RemoveContainer" containerID="01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.771099 4888 scope.go:117] "RemoveContainer" containerID="463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" Dec 01 19:54:39 crc kubenswrapper[4888]: E1201 19:54:39.771968 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452\": container with ID starting with 463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452 not found: ID does not exist" containerID="463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772026 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452"} err="failed to get container status \"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452\": rpc error: code = NotFound desc = could not find container \"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452\": container with ID starting with 463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772061 4888 scope.go:117] "RemoveContainer" containerID="2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" Dec 01 19:54:39 crc kubenswrapper[4888]: E1201 19:54:39.772450 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9\": container with ID starting with 2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9 not found: ID does not exist" containerID="2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772491 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9"} err="failed to get container status \"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9\": rpc error: code = NotFound desc = could not find container \"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9\": container with ID starting with 
2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772520 4888 scope.go:117] "RemoveContainer" containerID="e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" Dec 01 19:54:39 crc kubenswrapper[4888]: E1201 19:54:39.772833 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2\": container with ID starting with e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2 not found: ID does not exist" containerID="e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772857 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2"} err="failed to get container status \"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2\": rpc error: code = NotFound desc = could not find container \"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2\": container with ID starting with e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.772872 4888 scope.go:117] "RemoveContainer" containerID="01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" Dec 01 19:54:39 crc kubenswrapper[4888]: E1201 19:54:39.773272 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52\": container with ID starting with 01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52 not found: ID does not exist" containerID="01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.773306 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52"} err="failed to get container status \"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52\": rpc error: code = NotFound desc = could not find container \"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52\": container with ID starting with 01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.773344 4888 scope.go:117] "RemoveContainer" containerID="463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.773625 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452"} err="failed to get container status \"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452\": rpc error: code = NotFound desc = could not find container \"463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452\": container with ID starting with 463e236fb83e0e6f028a862f7881741c657df42b8807dce9e082804c5e18b452 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.773647 4888 scope.go:117] "RemoveContainer" containerID="2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9" Dec 01 19:54:39 crc 
kubenswrapper[4888]: I1201 19:54:39.773878 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9"} err="failed to get container status \"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9\": rpc error: code = NotFound desc = could not find container \"2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9\": container with ID starting with 2aa438637af752138039140b2fb9e2c6642ba9a2a0cdf3522157c8bac48312c9 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.773903 4888 scope.go:117] "RemoveContainer" containerID="e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.774144 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2"} err="failed to get container status \"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2\": rpc error: code = NotFound desc = could not find container \"e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2\": container with ID starting with e23e1aeb54a383b12063ed3c85f5dd1d48f27ba23e88aafcfe1378f555b17db2 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.774168 4888 scope.go:117] "RemoveContainer" containerID="01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.774408 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52"} err="failed to get container status \"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52\": rpc error: code = NotFound desc = could not find container \"01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52\": container with ID starting with 01f6a7bb575bceaae775e58c494d2ef0a9cb91b268ca4510c5ac8cf596b7fa52 not found: ID does not exist" Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.962621 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:39 crc kubenswrapper[4888]: I1201 19:54:39.987399 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007064 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:40 crc kubenswrapper[4888]: E1201 19:54:40.007474 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="sg-core" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007492 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="sg-core" Dec 01 19:54:40 crc kubenswrapper[4888]: E1201 19:54:40.007511 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="proxy-httpd" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007518 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="proxy-httpd" Dec 01 19:54:40 crc kubenswrapper[4888]: E1201 19:54:40.007527 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-notification-agent" Dec 01 
19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007534 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-notification-agent" Dec 01 19:54:40 crc kubenswrapper[4888]: E1201 19:54:40.007547 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-central-agent" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007552 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-central-agent" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007749 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-notification-agent" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007771 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="sg-core" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007789 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="ceilometer-central-agent" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.007801 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" containerName="proxy-httpd" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.009487 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.012757 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.013052 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.013457 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.015606 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120042 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120089 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120116 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120140 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120252 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120279 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120410 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.120527 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89ksl\" (UniqueName: \"kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223572 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223646 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223690 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223717 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223891 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.223940 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.224032 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.224117 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89ksl\" (UniqueName: \"kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.225282 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.225348 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.229207 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.229294 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.229464 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.229864 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.230265 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.250217 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-89ksl\" (UniqueName: \"kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl\") pod \"ceilometer-0\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.257705 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.258818 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.471025 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ec8ec36-8e38-4494-ac51-6998b16519da" path="/var/lib/kubelet/pods/7ec8ec36-8e38-4494-ac51-6998b16519da/volumes" Dec 01 19:54:40 crc kubenswrapper[4888]: I1201 19:54:40.736791 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:54:40 crc kubenswrapper[4888]: W1201 19:54:40.749431 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f55dfd9_ee17_4533_ab5e_870900af36c9.slice/crio-bcf2e1b54169996b72ca53cb4fa8ffd22e67dc00d6a340deea19159e5a4e27c8 WatchSource:0}: Error finding container bcf2e1b54169996b72ca53cb4fa8ffd22e67dc00d6a340deea19159e5a4e27c8: Status 404 returned error can't find the container with id bcf2e1b54169996b72ca53cb4fa8ffd22e67dc00d6a340deea19159e5a4e27c8 Dec 01 19:54:41 crc kubenswrapper[4888]: I1201 19:54:41.632344 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerStarted","Data":"bcf2e1b54169996b72ca53cb4fa8ffd22e67dc00d6a340deea19159e5a4e27c8"} Dec 01 19:54:41 crc kubenswrapper[4888]: I1201 19:54:41.752138 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.108693 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.158136 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data\") pod \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.158260 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle\") pod \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.158394 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs\") pod \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.158517 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvc8q\" (UniqueName: \"kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q\") pod \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\" (UID: \"d57884a1-f8fa-4747-9bfb-9b09c8da2d35\") " Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.159176 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs" (OuterVolumeSpecName: "logs") pod "d57884a1-f8fa-4747-9bfb-9b09c8da2d35" (UID: "d57884a1-f8fa-4747-9bfb-9b09c8da2d35"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.171612 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q" (OuterVolumeSpecName: "kube-api-access-jvc8q") pod "d57884a1-f8fa-4747-9bfb-9b09c8da2d35" (UID: "d57884a1-f8fa-4747-9bfb-9b09c8da2d35"). InnerVolumeSpecName "kube-api-access-jvc8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.201889 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data" (OuterVolumeSpecName: "config-data") pod "d57884a1-f8fa-4747-9bfb-9b09c8da2d35" (UID: "d57884a1-f8fa-4747-9bfb-9b09c8da2d35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.203058 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d57884a1-f8fa-4747-9bfb-9b09c8da2d35" (UID: "d57884a1-f8fa-4747-9bfb-9b09c8da2d35"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.261539 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvc8q\" (UniqueName: \"kubernetes.io/projected/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-kube-api-access-jvc8q\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.261581 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.261593 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.261602 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d57884a1-f8fa-4747-9bfb-9b09c8da2d35-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.641779 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerStarted","Data":"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153"} Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.643610 4888 generic.go:334] "Generic (PLEG): container finished" podID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerID="29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552" exitCode=0 Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.643638 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerDied","Data":"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552"} Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.643655 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d57884a1-f8fa-4747-9bfb-9b09c8da2d35","Type":"ContainerDied","Data":"55052c1519e57075630ac3f8d6c9f4a753a2a52bb0244da31da91cfc5328792a"} Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.643671 4888 scope.go:117] "RemoveContainer" containerID="29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.643723 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.687551 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.696466 4888 scope.go:117] "RemoveContainer" containerID="9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.706056 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.720944 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:42 crc kubenswrapper[4888]: E1201 19:54:42.722599 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-api" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.722623 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-api" Dec 01 19:54:42 crc kubenswrapper[4888]: E1201 19:54:42.722656 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-log" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.722664 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-log" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.722911 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-api" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.722941 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" containerName="nova-api-log" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.724634 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.727426 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.727637 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.728361 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.731787 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.742862 4888 scope.go:117] "RemoveContainer" containerID="29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552" Dec 01 19:54:42 crc kubenswrapper[4888]: E1201 19:54:42.745425 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552\": container with ID starting with 29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552 not found: ID does not exist" containerID="29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.745480 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552"} err="failed to get container status \"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552\": rpc error: code = NotFound desc = could not find container \"29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552\": container with ID starting with 29d87cfc09f2a6350a500c10a1e24ddad5d7729a4c3d052c8e6e591ac2981552 not found: ID does not exist" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.745514 4888 scope.go:117] "RemoveContainer" containerID="9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433" Dec 01 19:54:42 crc kubenswrapper[4888]: E1201 19:54:42.746109 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433\": container with ID starting with 9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433 not found: ID does not exist" containerID="9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.746156 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433"} err="failed to get container status \"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433\": rpc error: code = NotFound desc = could not find container \"9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433\": container with ID starting with 9255466cc336422615d0d175658d1c0df64fe54b8b4efd0abf9165dc3f1b2433 not found: ID does not exist" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.777710 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 
19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.777847 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.777899 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25p9q\" (UniqueName: \"kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.778094 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.778153 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.778217 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880212 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880259 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25p9q\" (UniqueName: \"kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880293 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880320 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880341 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data\") pod \"nova-api-0\" (UID: 
\"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880429 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.880586 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.885303 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.893323 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.893759 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.894092 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:42 crc kubenswrapper[4888]: I1201 19:54:42.896459 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25p9q\" (UniqueName: \"kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q\") pod \"nova-api-0\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " pod="openstack/nova-api-0" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.047110 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.235579 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.282655 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.550182 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:43 crc kubenswrapper[4888]: W1201 19:54:43.551973 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c32cbe6_786f_4d2f_842d_7084c73afcd1.slice/crio-1db5aaeeabd6dfd6f87ca2a9baf0a5e7e4e5343b106e73e841c97777568b0c6a WatchSource:0}: Error finding container 1db5aaeeabd6dfd6f87ca2a9baf0a5e7e4e5343b106e73e841c97777568b0c6a: Status 404 returned error can't find the container with id 1db5aaeeabd6dfd6f87ca2a9baf0a5e7e4e5343b106e73e841c97777568b0c6a Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.660927 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerStarted","Data":"1db5aaeeabd6dfd6f87ca2a9baf0a5e7e4e5343b106e73e841c97777568b0c6a"} Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.662833 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerStarted","Data":"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b"} Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.683324 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.954548 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-l77vw"] Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.965767 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.999485 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 01 19:54:43 crc kubenswrapper[4888]: I1201 19:54:43.999629 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.002636 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wp56\" (UniqueName: \"kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.002786 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.003019 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.003066 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.006442 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-l77vw"] Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.105172 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wp56\" (UniqueName: \"kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.105251 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.105342 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.105355 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.110158 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.110774 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.111825 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.125289 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wp56\" (UniqueName: \"kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56\") pod \"nova-cell1-cell-mapping-l77vw\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.326391 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.462955 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d57884a1-f8fa-4747-9bfb-9b09c8da2d35" path="/var/lib/kubelet/pods/d57884a1-f8fa-4747-9bfb-9b09c8da2d35/volumes" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.675292 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerStarted","Data":"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898"} Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.675341 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerStarted","Data":"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9"} Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.678470 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerStarted","Data":"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f"} Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.710991 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.7109701040000003 podStartE2EDuration="2.710970104s" podCreationTimestamp="2025-12-01 19:54:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:44.697539743 +0000 UTC m=+1284.568569657" watchObservedRunningTime="2025-12-01 19:54:44.710970104 +0000 UTC m=+1284.582000018" Dec 01 19:54:44 crc kubenswrapper[4888]: I1201 19:54:44.948795 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-l77vw"] Dec 01 19:54:44 crc kubenswrapper[4888]: W1201 19:54:44.951926 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeea443d4_4a6d_47df_9839_7108ffb3d4bc.slice/crio-301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8 WatchSource:0}: Error finding container 301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8: Status 404 returned error can't find the container with id 301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8 Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.692267 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerStarted","Data":"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246"} Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.692591 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-central-agent" containerID="cri-o://ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153" gracePeriod=30 Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.692713 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="proxy-httpd" containerID="cri-o://1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246" gracePeriod=30 Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.692831 4888 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="sg-core" containerID="cri-o://5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f" gracePeriod=30 Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.692868 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-notification-agent" containerID="cri-o://0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b" gracePeriod=30 Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.693303 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.702997 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-l77vw" event={"ID":"eea443d4-4a6d-47df-9839-7108ffb3d4bc","Type":"ContainerStarted","Data":"d1a793ae03c91d8ddda052feef2f2aa551fc8bf149f4fc0f8f4888f06fea49fb"} Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.703130 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-l77vw" event={"ID":"eea443d4-4a6d-47df-9839-7108ffb3d4bc","Type":"ContainerStarted","Data":"301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8"} Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.732803 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.076723794 podStartE2EDuration="6.732777729s" podCreationTimestamp="2025-12-01 19:54:39 +0000 UTC" firstStartedPulling="2025-12-01 19:54:40.75243188 +0000 UTC m=+1280.623461794" lastFinishedPulling="2025-12-01 19:54:45.408485815 +0000 UTC m=+1285.279515729" observedRunningTime="2025-12-01 19:54:45.728004258 +0000 UTC m=+1285.599034182" watchObservedRunningTime="2025-12-01 19:54:45.732777729 +0000 UTC m=+1285.603807643" Dec 01 19:54:45 crc kubenswrapper[4888]: I1201 19:54:45.751036 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-l77vw" podStartSLOduration=2.751014584 podStartE2EDuration="2.751014584s" podCreationTimestamp="2025-12-01 19:54:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:45.741442759 +0000 UTC m=+1285.612472673" watchObservedRunningTime="2025-12-01 19:54:45.751014584 +0000 UTC m=+1285.622044498" Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.163374 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.222255 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"] Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.222730 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="dnsmasq-dns" containerID="cri-o://53a33fe78c3c4d3fc20837f0e399a847af8b566e36f032513d2db9c50fbf7a0b" gracePeriod=10 Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.480317 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="dnsmasq-dns" 
probeResult="failure" output="dial tcp 10.217.0.188:5353: connect: connection refused" Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.717140 4888 generic.go:334] "Generic (PLEG): container finished" podID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerID="53a33fe78c3c4d3fc20837f0e399a847af8b566e36f032513d2db9c50fbf7a0b" exitCode=0 Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.717212 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" event={"ID":"f7fe219b-5f85-484b-b6cb-495f1e6264f2","Type":"ContainerDied","Data":"53a33fe78c3c4d3fc20837f0e399a847af8b566e36f032513d2db9c50fbf7a0b"} Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.717354 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" event={"ID":"f7fe219b-5f85-484b-b6cb-495f1e6264f2","Type":"ContainerDied","Data":"887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0"} Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.717374 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="887c7ce0f81dead07bc592ac8e543ecc51bf7b5242dc08f1c340bd854daea4b0" Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.719801 4888 generic.go:334] "Generic (PLEG): container finished" podID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerID="5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f" exitCode=2 Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.719875 4888 generic.go:334] "Generic (PLEG): container finished" podID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerID="0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b" exitCode=0 Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.719841 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerDied","Data":"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f"} Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.719971 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerDied","Data":"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b"} Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.777582 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973542 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973680 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973881 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973909 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p86nn\" (UniqueName: \"kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973932 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:46 crc kubenswrapper[4888]: I1201 19:54:46.973984 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb\") pod \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\" (UID: \"f7fe219b-5f85-484b-b6cb-495f1e6264f2\") " Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.006470 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn" (OuterVolumeSpecName: "kube-api-access-p86nn") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "kube-api-access-p86nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.032217 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config" (OuterVolumeSpecName: "config") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.042608 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.047028 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.051114 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.054507 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f7fe219b-5f85-484b-b6cb-495f1e6264f2" (UID: "f7fe219b-5f85-484b-b6cb-495f1e6264f2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076501 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076551 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076568 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p86nn\" (UniqueName: \"kubernetes.io/projected/f7fe219b-5f85-484b-b6cb-495f1e6264f2-kube-api-access-p86nn\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076585 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076599 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.076614 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f7fe219b-5f85-484b-b6cb-495f1e6264f2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.734703 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-rk96c" Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.771142 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"] Dec 01 19:54:47 crc kubenswrapper[4888]: I1201 19:54:47.784484 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-rk96c"] Dec 01 19:54:48 crc kubenswrapper[4888]: I1201 19:54:48.461926 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" path="/var/lib/kubelet/pods/f7fe219b-5f85-484b-b6cb-495f1e6264f2/volumes" Dec 01 19:54:48 crc kubenswrapper[4888]: I1201 19:54:48.744795 4888 generic.go:334] "Generic (PLEG): container finished" podID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerID="ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153" exitCode=0 Dec 01 19:54:48 crc kubenswrapper[4888]: I1201 19:54:48.744848 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerDied","Data":"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153"} Dec 01 19:54:50 crc kubenswrapper[4888]: I1201 19:54:50.038214 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:54:50 crc kubenswrapper[4888]: I1201 19:54:50.038549 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:54:50 crc kubenswrapper[4888]: I1201 19:54:50.952970 4888 generic.go:334] "Generic (PLEG): container finished" podID="eea443d4-4a6d-47df-9839-7108ffb3d4bc" containerID="d1a793ae03c91d8ddda052feef2f2aa551fc8bf149f4fc0f8f4888f06fea49fb" exitCode=0 Dec 01 19:54:50 crc kubenswrapper[4888]: I1201 19:54:50.953052 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-l77vw" event={"ID":"eea443d4-4a6d-47df-9839-7108ffb3d4bc","Type":"ContainerDied","Data":"d1a793ae03c91d8ddda052feef2f2aa551fc8bf149f4fc0f8f4888f06fea49fb"} Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.318294 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.355562 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wp56\" (UniqueName: \"kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56\") pod \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.355622 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts\") pod \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.355644 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data\") pod \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.355689 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle\") pod \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\" (UID: \"eea443d4-4a6d-47df-9839-7108ffb3d4bc\") " Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.363360 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts" (OuterVolumeSpecName: "scripts") pod "eea443d4-4a6d-47df-9839-7108ffb3d4bc" (UID: "eea443d4-4a6d-47df-9839-7108ffb3d4bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.363473 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56" (OuterVolumeSpecName: "kube-api-access-9wp56") pod "eea443d4-4a6d-47df-9839-7108ffb3d4bc" (UID: "eea443d4-4a6d-47df-9839-7108ffb3d4bc"). InnerVolumeSpecName "kube-api-access-9wp56". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.385445 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data" (OuterVolumeSpecName: "config-data") pod "eea443d4-4a6d-47df-9839-7108ffb3d4bc" (UID: "eea443d4-4a6d-47df-9839-7108ffb3d4bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.386894 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eea443d4-4a6d-47df-9839-7108ffb3d4bc" (UID: "eea443d4-4a6d-47df-9839-7108ffb3d4bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.457132 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.457164 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wp56\" (UniqueName: \"kubernetes.io/projected/eea443d4-4a6d-47df-9839-7108ffb3d4bc-kube-api-access-9wp56\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.457174 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.457202 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea443d4-4a6d-47df-9839-7108ffb3d4bc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.971314 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-l77vw" event={"ID":"eea443d4-4a6d-47df-9839-7108ffb3d4bc","Type":"ContainerDied","Data":"301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8"} Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.971359 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="301a4ba0331435af91a72b6e0818375ccb6cae4c68250c4f6cf1919bc177afd8" Dec 01 19:54:52 crc kubenswrapper[4888]: I1201 19:54:52.971754 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-l77vw" Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.048316 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.048377 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.158850 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.229772 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.230085 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerName="nova-scheduler-scheduler" containerID="cri-o://e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f" gracePeriod=30 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.248262 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.248751 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-log" containerID="cri-o://33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47" gracePeriod=30 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.248883 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" 
containerName="nova-metadata-metadata" containerID="cri-o://363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007" gracePeriod=30 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.987421 4888 generic.go:334] "Generic (PLEG): container finished" podID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerID="33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47" exitCode=143 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.987542 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerDied","Data":"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47"} Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.988847 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-log" containerID="cri-o://32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9" gracePeriod=30 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.989070 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-api" containerID="cri-o://3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898" gracePeriod=30 Dec 01 19:54:53 crc kubenswrapper[4888]: I1201 19:54:53.995235 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Dec 01 19:54:54 crc kubenswrapper[4888]: I1201 19:54:53.999654 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Dec 01 19:54:54 crc kubenswrapper[4888]: E1201 19:54:54.820661 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:54 crc kubenswrapper[4888]: E1201 19:54:54.822207 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:54 crc kubenswrapper[4888]: E1201 19:54:54.823441 4888 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 01 19:54:54 crc kubenswrapper[4888]: E1201 19:54:54.823472 4888 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerName="nova-scheduler-scheduler" Dec 01 19:54:55 crc 
kubenswrapper[4888]: I1201 19:54:55.017159 4888 generic.go:334] "Generic (PLEG): container finished" podID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerID="32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9" exitCode=143 Dec 01 19:54:55 crc kubenswrapper[4888]: I1201 19:54:55.017262 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerDied","Data":"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9"} Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.384745 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:37154->10.217.0.193:8775: read: connection reset by peer" Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.385572 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.193:8775/\": read tcp 10.217.0.2:37152->10.217.0.193:8775: read: connection reset by peer" Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.928723 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.962579 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs\") pod \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.962715 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs\") pod \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " Dec 01 19:54:56 crc kubenswrapper[4888]: I1201 19:54:56.964409 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs" (OuterVolumeSpecName: "logs") pod "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" (UID: "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.036336 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" (UID: "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.045031 4888 generic.go:334] "Generic (PLEG): container finished" podID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerID="363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007" exitCode=0 Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.045082 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.045104 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerDied","Data":"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007"} Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.045141 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87","Type":"ContainerDied","Data":"d302cb69a31b675b5ba6a9f916437ecec3d1e69b389d71f2a793eed80a93b636"} Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.045165 4888 scope.go:117] "RemoveContainer" containerID="363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.064344 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data\") pod \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.064788 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle\") pod \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.064828 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf8gb\" (UniqueName: \"kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb\") pod \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\" (UID: \"dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87\") " Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.065243 4888 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.065262 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.071014 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb" (OuterVolumeSpecName: "kube-api-access-kf8gb") pod "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" (UID: "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87"). InnerVolumeSpecName "kube-api-access-kf8gb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.099553 4888 scope.go:117] "RemoveContainer" containerID="33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.104539 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data" (OuterVolumeSpecName: "config-data") pod "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" (UID: "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.106478 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" (UID: "dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.121437 4888 scope.go:117] "RemoveContainer" containerID="363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007" Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.121902 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007\": container with ID starting with 363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007 not found: ID does not exist" containerID="363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.122027 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007"} err="failed to get container status \"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007\": rpc error: code = NotFound desc = could not find container \"363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007\": container with ID starting with 363b4ae8fd46980db5167b818e0a785e5013a6832a2a6603dcc1170556b7a007 not found: ID does not exist" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.122082 4888 scope.go:117] "RemoveContainer" containerID="33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47" Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.122337 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47\": container with ID starting with 33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47 not found: ID does not exist" containerID="33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.122361 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47"} err="failed to get container status \"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47\": rpc error: code = NotFound desc = could not find container \"33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47\": container with ID starting with 33a54860f2307d01a9813b70bf7b354cc1b97ffa7df8b292a8e17db9a23efd47 not found: ID does not exist" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.166469 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.166506 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 
19:54:57.166522 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf8gb\" (UniqueName: \"kubernetes.io/projected/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87-kube-api-access-kf8gb\") on node \"crc\" DevicePath \"\""
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.381579 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.392274 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.409671 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.410255 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-metadata"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410276 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-metadata"
Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.410313 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="init"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410322 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="init"
Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.410338 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-log"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410349 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-log"
Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.410361 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="dnsmasq-dns"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410368 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="dnsmasq-dns"
Dec 01 19:54:57 crc kubenswrapper[4888]: E1201 19:54:57.410412 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea443d4-4a6d-47df-9839-7108ffb3d4bc" containerName="nova-manage"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410420 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea443d4-4a6d-47df-9839-7108ffb3d4bc" containerName="nova-manage"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410645 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea443d4-4a6d-47df-9839-7108ffb3d4bc" containerName="nova-manage"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410700 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-log"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410716 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7fe219b-5f85-484b-b6cb-495f1e6264f2" containerName="dnsmasq-dns"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.410742 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" containerName="nova-metadata-metadata"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.411976 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.414256 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.415054 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.420978 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.471674 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jrsc\" (UniqueName: \"kubernetes.io/projected/649d802d-a02d-403f-938c-8875b22f1e04-kube-api-access-6jrsc\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.471735 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-config-data\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.472045 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.472101 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649d802d-a02d-403f-938c-8875b22f1e04-logs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.472129 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.574220 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.574334 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649d802d-a02d-403f-938c-8875b22f1e04-logs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.574364 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.574478 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jrsc\" (UniqueName: \"kubernetes.io/projected/649d802d-a02d-403f-938c-8875b22f1e04-kube-api-access-6jrsc\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.574509 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-config-data\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.575930 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/649d802d-a02d-403f-938c-8875b22f1e04-logs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.579544 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.579894 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-config-data\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.580067 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/649d802d-a02d-403f-938c-8875b22f1e04-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.599510 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jrsc\" (UniqueName: \"kubernetes.io/projected/649d802d-a02d-403f-938c-8875b22f1e04-kube-api-access-6jrsc\") pod \"nova-metadata-0\" (UID: \"649d802d-a02d-403f-938c-8875b22f1e04\") " pod="openstack/nova-metadata-0"
Dec 01 19:54:57 crc kubenswrapper[4888]: I1201 19:54:57.734865 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 19:54:58 crc kubenswrapper[4888]: I1201 19:54:58.187090 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 19:54:58 crc kubenswrapper[4888]: W1201 19:54:58.194566 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod649d802d_a02d_403f_938c_8875b22f1e04.slice/crio-9e79d202dba99e9fe92eeafe193402b03e32ef6e4703ed7235efab7629a4577c WatchSource:0}: Error finding container 9e79d202dba99e9fe92eeafe193402b03e32ef6e4703ed7235efab7629a4577c: Status 404 returned error can't find the container with id 9e79d202dba99e9fe92eeafe193402b03e32ef6e4703ed7235efab7629a4577c
Dec 01 19:54:58 crc kubenswrapper[4888]: I1201 19:54:58.487415 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87" path="/var/lib/kubelet/pods/dd47ab1b-d2d3-425c-9f8c-bb5ab9ae1c87/volumes"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.009844 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.087282 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"649d802d-a02d-403f-938c-8875b22f1e04","Type":"ContainerStarted","Data":"b0d2911347acb1669542a736cfbd7546f2460808288dd881cbc5c680ac50ec01"}
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.087329 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"649d802d-a02d-403f-938c-8875b22f1e04","Type":"ContainerStarted","Data":"522433fcd66eef967538961e6f03c4a191d1d72c36420c26a16feac303fdcb0c"}
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.087338 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"649d802d-a02d-403f-938c-8875b22f1e04","Type":"ContainerStarted","Data":"9e79d202dba99e9fe92eeafe193402b03e32ef6e4703ed7235efab7629a4577c"}
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.091342 4888 generic.go:334] "Generic (PLEG): container finished" podID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f" exitCode=0
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.091383 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6","Type":"ContainerDied","Data":"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"}
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.091397 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.091412 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6","Type":"ContainerDied","Data":"650ad5b8ffdab86493a9cfb2d1c68829e79c5c17b11b0112c8ad919367ab71b1"}
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.091433 4888 scope.go:117] "RemoveContainer" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.114835 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.114817879 podStartE2EDuration="2.114817879s" podCreationTimestamp="2025-12-01 19:54:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:54:59.109333328 +0000 UTC m=+1298.980363262" watchObservedRunningTime="2025-12-01 19:54:59.114817879 +0000 UTC m=+1298.985847793"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.115581 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data\") pod \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") "
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.115694 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle\") pod \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") "
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.115857 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hr6p\" (UniqueName: \"kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p\") pod \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\" (UID: \"bd777d06-4cc8-489b-a2bb-7d789f6ddeb6\") "
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.128100 4888 scope.go:117] "RemoveContainer" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"
Dec 01 19:54:59 crc kubenswrapper[4888]: E1201 19:54:59.128587 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f\": container with ID starting with e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f not found: ID does not exist" containerID="e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.128617 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f"} err="failed to get container status \"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f\": rpc error: code = NotFound desc = could not find container \"e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f\": container with ID starting with e680de929625193a261c60a4d67adba815eba5073b681fce5ac5049b25668a2f not found: ID does not exist"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.128752 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p" (OuterVolumeSpecName: "kube-api-access-4hr6p") pod "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" (UID: "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6"). InnerVolumeSpecName "kube-api-access-4hr6p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.148572 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data" (OuterVolumeSpecName: "config-data") pod "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" (UID: "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.148739 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" (UID: "bd777d06-4cc8-489b-a2bb-7d789f6ddeb6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.221270 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.221321 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.221337 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hr6p\" (UniqueName: \"kubernetes.io/projected/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6-kube-api-access-4hr6p\") on node \"crc\" DevicePath \"\""
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.545415 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.552788 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.564990 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:54:59 crc kubenswrapper[4888]: E1201 19:54:59.565574 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerName="nova-scheduler-scheduler"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.565593 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerName="nova-scheduler-scheduler"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.565781 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" containerName="nova-scheduler-scheduler"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.566560 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.571004 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.587400 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.735813 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.735885 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24k8h\" (UniqueName: \"kubernetes.io/projected/e595d238-ccc8-452b-9e47-3439757e586f-kube-api-access-24k8h\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.735937 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-config-data\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.837613 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-config-data\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.837740 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.837794 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24k8h\" (UniqueName: \"kubernetes.io/projected/e595d238-ccc8-452b-9e47-3439757e586f-kube-api-access-24k8h\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.842276 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-config-data\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.842290 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e595d238-ccc8-452b-9e47-3439757e586f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.869990 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24k8h\" (UniqueName: \"kubernetes.io/projected/e595d238-ccc8-452b-9e47-3439757e586f-kube-api-access-24k8h\") pod \"nova-scheduler-0\" (UID: \"e595d238-ccc8-452b-9e47-3439757e586f\") " pod="openstack/nova-scheduler-0"
Dec 01 19:54:59 crc kubenswrapper[4888]: I1201 19:54:59.902516 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.025145 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.101901 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.101947 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerDied","Data":"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898"}
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.101958 4888 generic.go:334] "Generic (PLEG): container finished" podID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerID="3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898" exitCode=0
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.101990 4888 scope.go:117] "RemoveContainer" containerID="3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898"
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.102120 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6c32cbe6-786f-4d2f-842d-7084c73afcd1","Type":"ContainerDied","Data":"1db5aaeeabd6dfd6f87ca2a9baf0a5e7e4e5343b106e73e841c97777568b0c6a"}
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.125823 4888 scope.go:117] "RemoveContainer" containerID="32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9"
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143117 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") "
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143171 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") "
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143253 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25p9q\" (UniqueName: \"kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") "
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143300 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") "
Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143319 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") "
\"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143434 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.143878 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs" (OuterVolumeSpecName: "logs") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.144047 4888 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c32cbe6-786f-4d2f-842d-7084c73afcd1-logs\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.148341 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q" (OuterVolumeSpecName: "kube-api-access-25p9q") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "kube-api-access-25p9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.155475 4888 scope.go:117] "RemoveContainer" containerID="3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898" Dec 01 19:55:00 crc kubenswrapper[4888]: E1201 19:55:00.155928 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898\": container with ID starting with 3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898 not found: ID does not exist" containerID="3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.155964 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898"} err="failed to get container status \"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898\": rpc error: code = NotFound desc = could not find container \"3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898\": container with ID starting with 3e358ac85c4fc1e8288bfd629438f405859d996e300f26b304dd5037ce985898 not found: ID does not exist" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.156004 4888 scope.go:117] "RemoveContainer" containerID="32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9" Dec 01 19:55:00 crc kubenswrapper[4888]: E1201 19:55:00.156205 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9\": container with ID starting with 32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9 not found: ID does not exist" containerID="32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9" Dec 01 19:55:00 
crc kubenswrapper[4888]: I1201 19:55:00.156221 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9"} err="failed to get container status \"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9\": rpc error: code = NotFound desc = could not find container \"32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9\": container with ID starting with 32d78ea08ab5f939d4a5cfc1c448780ffbe6bb35ab663b89635641a0503e7bf9 not found: ID does not exist" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.172776 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data" (OuterVolumeSpecName: "config-data") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.173509 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: E1201 19:55:00.195219 4888 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs podName:6c32cbe6-786f-4d2f-842d-7084c73afcd1 nodeName:}" failed. No retries permitted until 2025-12-01 19:55:00.695178019 +0000 UTC m=+1300.566207933 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1") : error deleting /var/lib/kubelet/pods/6c32cbe6-786f-4d2f-842d-7084c73afcd1/volume-subpaths: remove /var/lib/kubelet/pods/6c32cbe6-786f-4d2f-842d-7084c73afcd1/volume-subpaths: no such file or directory Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.197333 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.245735 4888 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.245766 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.245775 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25p9q\" (UniqueName: \"kubernetes.io/projected/6c32cbe6-786f-4d2f-842d-7084c73afcd1-kube-api-access-25p9q\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.245784 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.335492 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 19:55:00 crc kubenswrapper[4888]: W1201 19:55:00.337832 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode595d238_ccc8_452b_9e47_3439757e586f.slice/crio-be012887000c7ddf1b1038e6bc72955356850176a48f55a7e606b6176209b66c WatchSource:0}: Error finding container be012887000c7ddf1b1038e6bc72955356850176a48f55a7e606b6176209b66c: Status 404 returned error can't find the container with id be012887000c7ddf1b1038e6bc72955356850176a48f55a7e606b6176209b66c Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.468554 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd777d06-4cc8-489b-a2bb-7d789f6ddeb6" path="/var/lib/kubelet/pods/bd777d06-4cc8-489b-a2bb-7d789f6ddeb6/volumes" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.755382 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") pod \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\" (UID: \"6c32cbe6-786f-4d2f-842d-7084c73afcd1\") " Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.764358 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6c32cbe6-786f-4d2f-842d-7084c73afcd1" (UID: "6c32cbe6-786f-4d2f-842d-7084c73afcd1"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:00 crc kubenswrapper[4888]: I1201 19:55:00.858656 4888 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c32cbe6-786f-4d2f-842d-7084c73afcd1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.038051 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.050018 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.065581 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 19:55:01 crc kubenswrapper[4888]: E1201 19:55:01.066005 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-log" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.066023 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-log" Dec 01 19:55:01 crc kubenswrapper[4888]: E1201 19:55:01.066048 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-api" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.066056 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-api" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.066294 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-log" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.066372 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" containerName="nova-api-api" Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.067597 4888 util.go:30] "No sandbox for pod can be found. 
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.070665 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.070910 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.071036 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.082482 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.115238 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e595d238-ccc8-452b-9e47-3439757e586f","Type":"ContainerStarted","Data":"b2c6277cc7c9ca8c6f76aa36e3ccace25550c483a2ececaca7e6692ef85ce62e"}
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.115293 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e595d238-ccc8-452b-9e47-3439757e586f","Type":"ContainerStarted","Data":"be012887000c7ddf1b1038e6bc72955356850176a48f55a7e606b6176209b66c"}
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.138497 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.13847893 podStartE2EDuration="2.13847893s" podCreationTimestamp="2025-12-01 19:54:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:55:01.128793263 +0000 UTC m=+1300.999823167" watchObservedRunningTime="2025-12-01 19:55:01.13847893 +0000 UTC m=+1301.009508844"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.165615 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-logs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.165674 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.165744 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.165780 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.166099 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-config-data\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.166143 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24n9p\" (UniqueName: \"kubernetes.io/projected/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-kube-api-access-24n9p\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.268155 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.268916 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.269021 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-config-data\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.269047 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24n9p\" (UniqueName: \"kubernetes.io/projected/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-kube-api-access-24n9p\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.269119 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-logs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.269148 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.270313 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-logs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.272866 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-public-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.273250 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.273759 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-config-data\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.285517 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.294748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24n9p\" (UniqueName: \"kubernetes.io/projected/8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed-kube-api-access-24n9p\") pod \"nova-api-0\" (UID: \"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed\") " pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.384726 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 19:55:01 crc kubenswrapper[4888]: W1201 19:55:01.849923 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c3ae8a7_95af_4a5c_b4a7_70f0950b83ed.slice/crio-47b9df02a03c254343c2cd1212f346a5eb7f57cb322abe2dc20e096fb529921c WatchSource:0}: Error finding container 47b9df02a03c254343c2cd1212f346a5eb7f57cb322abe2dc20e096fb529921c: Status 404 returned error can't find the container with id 47b9df02a03c254343c2cd1212f346a5eb7f57cb322abe2dc20e096fb529921c
Dec 01 19:55:01 crc kubenswrapper[4888]: I1201 19:55:01.851605 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 19:55:02 crc kubenswrapper[4888]: I1201 19:55:02.133456 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed","Type":"ContainerStarted","Data":"9cd12dfbb486a9170e0212d99587e2d3afc72e2cd435c77fe84971da5085c4bc"}
Dec 01 19:55:02 crc kubenswrapper[4888]: I1201 19:55:02.133942 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed","Type":"ContainerStarted","Data":"47b9df02a03c254343c2cd1212f346a5eb7f57cb322abe2dc20e096fb529921c"}
Dec 01 19:55:02 crc kubenswrapper[4888]: I1201 19:55:02.461224 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c32cbe6-786f-4d2f-842d-7084c73afcd1" path="/var/lib/kubelet/pods/6c32cbe6-786f-4d2f-842d-7084c73afcd1/volumes"
Dec 01 19:55:02 crc kubenswrapper[4888]: I1201 19:55:02.735274 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 19:55:02 crc kubenswrapper[4888]: I1201 19:55:02.735327 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 19:55:03 crc kubenswrapper[4888]: I1201 19:55:03.142782 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed","Type":"ContainerStarted","Data":"221b7e023b06aaa3765f89a3b3a943792497e2e7e9f17ad8c4299eab1d66974f"}
Dec 01 19:55:03 crc kubenswrapper[4888]: I1201 19:55:03.163953 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.163928628 podStartE2EDuration="2.163928628s" podCreationTimestamp="2025-12-01 19:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:55:03.161370758 +0000 UTC m=+1303.032400692" watchObservedRunningTime="2025-12-01 19:55:03.163928628 +0000 UTC m=+1303.034958552"
Dec 01 19:55:04 crc kubenswrapper[4888]: I1201 19:55:04.903417 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 01 19:55:07 crc kubenswrapper[4888]: I1201 19:55:07.736170 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 19:55:07 crc kubenswrapper[4888]: I1201 19:55:07.736237 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 19:55:08 crc kubenswrapper[4888]: I1201 19:55:08.749461 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="649d802d-a02d-403f-938c-8875b22f1e04" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 19:55:08 crc kubenswrapper[4888]: I1201 19:55:08.749461 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="649d802d-a02d-403f-938c-8875b22f1e04" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 19:55:09 crc kubenswrapper[4888]: I1201 19:55:09.903607 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 01 19:55:09 crc kubenswrapper[4888]: I1201 19:55:09.937921 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 01 19:55:10 crc kubenswrapper[4888]: I1201 19:55:10.253677 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 01 19:55:10 crc kubenswrapper[4888]: I1201 19:55:10.268038 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 01 19:55:11 crc kubenswrapper[4888]: I1201 19:55:11.386218 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 19:55:11 crc kubenswrapper[4888]: I1201 19:55:11.386277 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 19:55:12 crc kubenswrapper[4888]: I1201 19:55:12.396493 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 19:55:12 crc kubenswrapper[4888]: I1201 19:55:12.406412 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.074957 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218539 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218594 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218716 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218838 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218874 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218918 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.218973 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89ksl\" (UniqueName: \"kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.219036 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd\") pod \"3f55dfd9-ee17-4533-ab5e-870900af36c9\" (UID: \"3f55dfd9-ee17-4533-ab5e-870900af36c9\") " Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.219381 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.219594 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.224560 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts" (OuterVolumeSpecName: "scripts") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.224701 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl" (OuterVolumeSpecName: "kube-api-access-89ksl") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "kube-api-access-89ksl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.251410 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.274003 4888 generic.go:334] "Generic (PLEG): container finished" podID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerID="1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246" exitCode=137 Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.274056 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerDied","Data":"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246"} Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.274090 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3f55dfd9-ee17-4533-ab5e-870900af36c9","Type":"ContainerDied","Data":"bcf2e1b54169996b72ca53cb4fa8ffd22e67dc00d6a340deea19159e5a4e27c8"} Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.274112 4888 scope.go:117] "RemoveContainer" containerID="1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.274306 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.282888 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.298980 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323029 4888 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323070 4888 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323083 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323099 4888 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323111 4888 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323124 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89ksl\" (UniqueName: \"kubernetes.io/projected/3f55dfd9-ee17-4533-ab5e-870900af36c9-kube-api-access-89ksl\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.323136 4888 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3f55dfd9-ee17-4533-ab5e-870900af36c9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.328318 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data" (OuterVolumeSpecName: "config-data") pod "3f55dfd9-ee17-4533-ab5e-870900af36c9" (UID: "3f55dfd9-ee17-4533-ab5e-870900af36c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.344307 4888 scope.go:117] "RemoveContainer" containerID="5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.365175 4888 scope.go:117] "RemoveContainer" containerID="0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.391442 4888 scope.go:117] "RemoveContainer" containerID="ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.411813 4888 scope.go:117] "RemoveContainer" containerID="1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.412378 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246\": container with ID starting with 1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246 not found: ID does not exist" containerID="1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.412421 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246"} err="failed to get container status \"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246\": rpc error: code = NotFound desc = could not find container \"1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246\": container with ID starting with 1cae2448c0e1c96993b26de18691b2e02e61d98c071b1e866f0c2ef387ba1246 not found: ID does not exist" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.412445 4888 scope.go:117] "RemoveContainer" containerID="5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.412766 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f\": container with ID starting with 5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f not found: ID does not exist" containerID="5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.412820 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f"} err="failed to get container status \"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f\": rpc error: code = NotFound desc = could not find container \"5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f\": container with ID starting with 5b5979adeccae65583392a0c550b2cb6ba6f2fc84c05d41f6e11c1a7d642cc5f not found: ID does not exist" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.412857 4888 scope.go:117] "RemoveContainer" containerID="0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.413286 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b\": container with ID starting with 
0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b not found: ID does not exist" containerID="0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.413332 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b"} err="failed to get container status \"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b\": rpc error: code = NotFound desc = could not find container \"0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b\": container with ID starting with 0e7afaa954bc482b5ce9bae569b466ce5d9e157ba7a078ad5fda17eeefb5b44b not found: ID does not exist" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.413366 4888 scope.go:117] "RemoveContainer" containerID="ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.413707 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153\": container with ID starting with ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153 not found: ID does not exist" containerID="ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.413849 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153"} err="failed to get container status \"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153\": rpc error: code = NotFound desc = could not find container \"ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153\": container with ID starting with ecfcd2a537f08fbc9916b352c31275f84798b7012f5f8deb63c765fbbda23153 not found: ID does not exist" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.425324 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f55dfd9-ee17-4533-ab5e-870900af36c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.596799 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.604998 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.623734 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.624215 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="sg-core" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624240 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="sg-core" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.624282 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="proxy-httpd" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624291 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="proxy-httpd" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.624308 4888 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-central-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624316 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-central-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: E1201 19:55:16.624329 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-notification-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624337 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-notification-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624550 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="sg-core" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624577 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="proxy-httpd" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624593 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-notification-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.624619 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" containerName="ceilometer-central-agent" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.627047 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.630951 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.631575 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.632059 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.643033 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731512 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731561 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731616 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-config-data\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 
19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731677 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-log-httpd\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731705 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-run-httpd\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731727 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-scripts\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731743 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.731783 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxr7m\" (UniqueName: \"kubernetes.io/projected/2be11eda-a57e-402c-a39a-f72af50268ef-kube-api-access-gxr7m\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.833849 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxr7m\" (UniqueName: \"kubernetes.io/projected/2be11eda-a57e-402c-a39a-f72af50268ef-kube-api-access-gxr7m\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.833920 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.833940 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.833991 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-config-data\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.834055 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-log-httpd\") pod 
\"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.834085 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-run-httpd\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.834106 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-scripts\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.834119 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.834963 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-run-httpd\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.835012 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2be11eda-a57e-402c-a39a-f72af50268ef-log-httpd\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.841080 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.841636 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.846147 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-config-data\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.846888 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-scripts\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.854995 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2be11eda-a57e-402c-a39a-f72af50268ef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " 
pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.891028 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxr7m\" (UniqueName: \"kubernetes.io/projected/2be11eda-a57e-402c-a39a-f72af50268ef-kube-api-access-gxr7m\") pod \"ceilometer-0\" (UID: \"2be11eda-a57e-402c-a39a-f72af50268ef\") " pod="openstack/ceilometer-0" Dec 01 19:55:16 crc kubenswrapper[4888]: I1201 19:55:16.955055 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 19:55:17 crc kubenswrapper[4888]: I1201 19:55:17.448716 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 19:55:17 crc kubenswrapper[4888]: W1201 19:55:17.455707 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2be11eda_a57e_402c_a39a_f72af50268ef.slice/crio-05deaacbb091064129fb3e73f5a2e2bd70381ae70017e14190ff0b8e57b45b23 WatchSource:0}: Error finding container 05deaacbb091064129fb3e73f5a2e2bd70381ae70017e14190ff0b8e57b45b23: Status 404 returned error can't find the container with id 05deaacbb091064129fb3e73f5a2e2bd70381ae70017e14190ff0b8e57b45b23 Dec 01 19:55:17 crc kubenswrapper[4888]: I1201 19:55:17.740670 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 19:55:17 crc kubenswrapper[4888]: I1201 19:55:17.741708 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 19:55:17 crc kubenswrapper[4888]: I1201 19:55:17.745065 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 19:55:18 crc kubenswrapper[4888]: I1201 19:55:18.294863 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2be11eda-a57e-402c-a39a-f72af50268ef","Type":"ContainerStarted","Data":"05deaacbb091064129fb3e73f5a2e2bd70381ae70017e14190ff0b8e57b45b23"} Dec 01 19:55:18 crc kubenswrapper[4888]: I1201 19:55:18.302355 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 19:55:18 crc kubenswrapper[4888]: I1201 19:55:18.462400 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f55dfd9-ee17-4533-ab5e-870900af36c9" path="/var/lib/kubelet/pods/3f55dfd9-ee17-4533-ab5e-870900af36c9/volumes" Dec 01 19:55:19 crc kubenswrapper[4888]: I1201 19:55:19.305730 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2be11eda-a57e-402c-a39a-f72af50268ef","Type":"ContainerStarted","Data":"daee99b5f0647e4458670977d187ba24dd46d242e8e8a9a967f5d8d8687a8618"} Dec 01 19:55:19 crc kubenswrapper[4888]: I1201 19:55:19.306026 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2be11eda-a57e-402c-a39a-f72af50268ef","Type":"ContainerStarted","Data":"6b16248c877331de9e81410eda210ec5d5cfa328d97eb8a255fdc95bbabeef4d"} Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.038303 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.038595 4888 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.038639 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.039401 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.039444 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec" gracePeriod=600 Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.316115 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec" exitCode=0 Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.316208 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec"} Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.316287 4888 scope.go:117] "RemoveContainer" containerID="a9b5fe3b9907856ca6ec97c5b3f53aa9af1c08618e093b179f83920b289c6e3e" Dec 01 19:55:20 crc kubenswrapper[4888]: I1201 19:55:20.322662 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2be11eda-a57e-402c-a39a-f72af50268ef","Type":"ContainerStarted","Data":"4399cb57e2986ebcc7c36bd0cb37afe83292e8d49812e4329d4015c8f3118ff8"} Dec 01 19:55:21 crc kubenswrapper[4888]: I1201 19:55:21.361654 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb"} Dec 01 19:55:21 crc kubenswrapper[4888]: I1201 19:55:21.479074 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 19:55:21 crc kubenswrapper[4888]: I1201 19:55:21.479604 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 19:55:21 crc kubenswrapper[4888]: I1201 19:55:21.485688 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 19:55:21 crc kubenswrapper[4888]: I1201 19:55:21.494820 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 19:55:22 crc kubenswrapper[4888]: I1201 19:55:22.375165 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"2be11eda-a57e-402c-a39a-f72af50268ef","Type":"ContainerStarted","Data":"f10ad09f22055f0674893165e8141bf9c61df306e35b8e87946f780c13727de9"} Dec 01 19:55:22 crc kubenswrapper[4888]: I1201 19:55:22.375672 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 19:55:22 crc kubenswrapper[4888]: I1201 19:55:22.389656 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 19:55:22 crc kubenswrapper[4888]: I1201 19:55:22.401459 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.716699045 podStartE2EDuration="6.401436416s" podCreationTimestamp="2025-12-01 19:55:16 +0000 UTC" firstStartedPulling="2025-12-01 19:55:17.457962646 +0000 UTC m=+1317.328992560" lastFinishedPulling="2025-12-01 19:55:21.142700027 +0000 UTC m=+1321.013729931" observedRunningTime="2025-12-01 19:55:22.397157448 +0000 UTC m=+1322.268187382" watchObservedRunningTime="2025-12-01 19:55:22.401436416 +0000 UTC m=+1322.272466330" Dec 01 19:55:23 crc kubenswrapper[4888]: I1201 19:55:23.384447 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 19:55:46 crc kubenswrapper[4888]: I1201 19:55:46.966205 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 19:55:56 crc kubenswrapper[4888]: I1201 19:55:56.391088 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:55:57 crc kubenswrapper[4888]: I1201 19:55:57.393865 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 19:56:00 crc kubenswrapper[4888]: I1201 19:56:00.977430 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="rabbitmq" containerID="cri-o://7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd" gracePeriod=604796 Dec 01 19:56:01 crc kubenswrapper[4888]: I1201 19:56:01.614549 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="rabbitmq" containerID="cri-o://6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0" gracePeriod=604796 Dec 01 19:56:05 crc kubenswrapper[4888]: I1201 19:56:05.353749 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 01 19:56:05 crc kubenswrapper[4888]: I1201 19:56:05.634360 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.556554 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738022 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738116 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738160 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738307 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2nrzz\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738389 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738423 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738456 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738485 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738520 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738549 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: 
\"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.738589 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data\") pod \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\" (UID: \"f46b2389-73b7-4b69-a316-ab9e17fc8d1f\") " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.739289 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.740262 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.740665 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.744982 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.750611 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.750848 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.760576 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz" (OuterVolumeSpecName: "kube-api-access-2nrzz") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "kube-api-access-2nrzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.760885 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info" (OuterVolumeSpecName: "pod-info") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.776286 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data" (OuterVolumeSpecName: "config-data") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.796959 4888 generic.go:334] "Generic (PLEG): container finished" podID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerID="7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd" exitCode=0 Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.797001 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerDied","Data":"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd"} Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.797028 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"f46b2389-73b7-4b69-a316-ab9e17fc8d1f","Type":"ContainerDied","Data":"c2de545a47b4fd07c5db1312457674639b43060ee2db7b9ea6d745032d28c473"} Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.797048 4888 scope.go:117] "RemoveContainer" containerID="7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.797174 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.825290 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf" (OuterVolumeSpecName: "server-conf") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841649 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841720 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841736 4888 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841751 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841764 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841802 4888 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841815 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841826 4888 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-pod-info\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841837 4888 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-server-conf\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.841850 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2nrzz\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-kube-api-access-2nrzz\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.872836 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.898356 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f46b2389-73b7-4b69-a316-ab9e17fc8d1f" (UID: "f46b2389-73b7-4b69-a316-ab9e17fc8d1f"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.950502 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.950536 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f46b2389-73b7-4b69-a316-ab9e17fc8d1f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.967842 4888 scope.go:117] "RemoveContainer" containerID="db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.990333 4888 scope.go:117] "RemoveContainer" containerID="7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd" Dec 01 19:56:07 crc kubenswrapper[4888]: E1201 19:56:07.990979 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd\": container with ID starting with 7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd not found: ID does not exist" containerID="7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.991104 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd"} err="failed to get container status \"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd\": rpc error: code = NotFound desc = could not find container \"7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd\": container with ID starting with 7430d8655c7821b89ffa22ce641610dc6c2b7589aa91001f72d42c208ef95abd not found: ID does not exist" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.991610 4888 scope.go:117] "RemoveContainer" containerID="db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33" Dec 01 19:56:07 crc kubenswrapper[4888]: E1201 19:56:07.992067 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33\": container with ID starting with db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33 not found: ID does not exist" containerID="db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33" Dec 01 19:56:07 crc kubenswrapper[4888]: I1201 19:56:07.992103 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33"} err="failed to get container status \"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33\": rpc error: code = NotFound desc = could not find container \"db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33\": container with ID starting with db5c02ced5768cc44a68b88bff7398c3c0b634dd62692b395c0c93853a697b33 not found: ID does not exist" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.083810 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154294 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154395 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154417 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mhkk\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154439 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154460 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154505 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154522 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154540 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154561 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154589 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: 
\"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.154616 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf\") pod \"f994d099-faac-4c30-8cab-e6ef9b8772cd\" (UID: \"f994d099-faac-4c30-8cab-e6ef9b8772cd\") " Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.156076 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.156505 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.156924 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.159319 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk" (OuterVolumeSpecName: "kube-api-access-2mhkk") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "kube-api-access-2mhkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.159956 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info" (OuterVolumeSpecName: "pod-info") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.161400 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.162453 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.164392 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.178439 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.198817 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.219264 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.219825 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="setup-container" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.219844 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="setup-container" Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.219858 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="setup-container" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.219867 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="setup-container" Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.219919 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.219928 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.219945 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.219953 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.220209 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.220246 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" containerName="rabbitmq" Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.221604 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.223866 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.224901 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data" (OuterVolumeSpecName: "config-data") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.226451 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.226773 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.226958 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.227124 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-tpks7"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.227752 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.228017 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.235008 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.253009 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf" (OuterVolumeSpecName: "server-conf") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257709 4888 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f994d099-faac-4c30-8cab-e6ef9b8772cd-pod-info\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257745 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257757 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257768 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257801 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257814 4888 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-server-conf\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257845 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257856 4888 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f994d099-faac-4c30-8cab-e6ef9b8772cd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257866 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mhkk\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-kube-api-access-2mhkk\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.257878 4888 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f994d099-faac-4c30-8cab-e6ef9b8772cd-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.298558 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360110 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360247 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360290 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360351 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360447 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360511 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360594 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360711 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl6d9\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-kube-api-access-tl6d9\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360747 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360782 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360816 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.360911 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.370153 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f994d099-faac-4c30-8cab-e6ef9b8772cd" (UID: "f994d099-faac-4c30-8cab-e6ef9b8772cd"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462484 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462530 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462578 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462596 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462612 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462628 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462655 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462685 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462729 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462760 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl6d9\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-kube-api-access-tl6d9\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462778 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462807 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f46b2389-73b7-4b69-a316-ab9e17fc8d1f" path="/var/lib/kubelet/pods/f46b2389-73b7-4b69-a316-ab9e17fc8d1f/volumes"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.462824 4888 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f994d099-faac-4c30-8cab-e6ef9b8772cd-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.463543 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-config-data\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.464090 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.464600 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.465140 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.465353 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.465411 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.470579 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.470930 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.471767 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.471870 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.485878 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl6d9\" (UniqueName: \"kubernetes.io/projected/ff7c4d43-2663-4f78-a40b-8a6dc418c31c-kube-api-access-tl6d9\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.512676 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"ff7c4d43-2663-4f78-a40b-8a6dc418c31c\") " pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.729613 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.808227 4888 generic.go:334] "Generic (PLEG): container finished" podID="f994d099-faac-4c30-8cab-e6ef9b8772cd" containerID="6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0" exitCode=0
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.808275 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerDied","Data":"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"}
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.808288 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.808317 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f994d099-faac-4c30-8cab-e6ef9b8772cd","Type":"ContainerDied","Data":"79229edc17ab7298a11cb3a77eecbb93a14e4aa81c24064f3a33e3e1da6d91f4"}
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.808340 4888 scope.go:117] "RemoveContainer" containerID="6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.836107 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.836748 4888 scope.go:117] "RemoveContainer" containerID="c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.846356 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.876112 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.877954 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.884642 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885011 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885219 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885368 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885542 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885681 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-l5lzk"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.885814 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.886470 4888 scope.go:117] "RemoveContainer" containerID="6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"
Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.889289 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0\": container with ID starting with 6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0 not found: ID does not exist" containerID="6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.889394 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0"} err="failed to get container status \"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0\": rpc error: code = NotFound desc = could not find container \"6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0\": container with ID starting with 6325cea08b986802dc1f61aeed84c011c3be5e6880c3017946dcf2bd6e8553a0 not found: ID does not exist"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.889423 4888 scope.go:117] "RemoveContainer" containerID="c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"
Dec 01 19:56:08 crc kubenswrapper[4888]: E1201 19:56:08.895476 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d\": container with ID starting with c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d not found: ID does not exist" containerID="c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.895734 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d"} err="failed to get container status \"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d\": rpc error: code = NotFound desc = could not find container \"c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d\": container with ID starting with c22aa79097486103c9feccf21ddf8afbc34a6bad6096992b9d859eb024421b0d not found: ID does not exist"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.921999 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978403 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978518 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978565 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978648 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrz9t\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-kube-api-access-jrz9t\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978698 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978741 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978827 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c47dabe8-c903-4454-82c5-3c4a28322366-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978859 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c47dabe8-c903-4454-82c5-3c4a28322366-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978878 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978906 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:08 crc kubenswrapper[4888]: I1201 19:56:08.978957 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081066 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081219 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081324 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081387 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081484 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrz9t\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-kube-api-access-jrz9t\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081553 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081621 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081786 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c47dabe8-c903-4454-82c5-3c4a28322366-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081870 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c47dabe8-c903-4454-82c5-3c4a28322366-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081899 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.081931 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.082177 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.082247 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.082650 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.083267 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.083868 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.084950 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c47dabe8-c903-4454-82c5-3c4a28322366-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.088955 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c47dabe8-c903-4454-82c5-3c4a28322366-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.089861 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c47dabe8-c903-4454-82c5-3c4a28322366-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.098911 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.098954 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.101593 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrz9t\" (UniqueName: \"kubernetes.io/projected/c47dabe8-c903-4454-82c5-3c4a28322366-kube-api-access-jrz9t\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.123986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c47dabe8-c903-4454-82c5-3c4a28322366\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.261179 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.346451 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.797891 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.821129 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c47dabe8-c903-4454-82c5-3c4a28322366","Type":"ContainerStarted","Data":"478c816d54736f075bb408bd4ea94438dad50f24821f4c360599dc02c0681a5d"}
Dec 01 19:56:09 crc kubenswrapper[4888]: I1201 19:56:09.827348 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff7c4d43-2663-4f78-a40b-8a6dc418c31c","Type":"ContainerStarted","Data":"5fd565d0763d94cc45e346634bbf8446abef667adaf941a9e7a0b655c6f43fcb"}
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.218297 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"]
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.220699 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.223400 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.229529 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"]
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305148 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305206 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305251 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmcc5\" (UniqueName: \"kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305274 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305320 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305595 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.305729 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407366 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407458 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407500 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407520 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407556 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmcc5\" (UniqueName: \"kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.407575 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.408523 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.408524 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.408524 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.408896 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.408224 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.409022 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.409472 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.462996 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f994d099-faac-4c30-8cab-e6ef9b8772cd" path="/var/lib/kubelet/pods/f994d099-faac-4c30-8cab-e6ef9b8772cd/volumes"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.493650 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmcc5\" (UniqueName: \"kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5\") pod \"dnsmasq-dns-79bd4cc8c9-bq4fq\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:10 crc kubenswrapper[4888]: I1201 19:56:10.587660 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.066086 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"]
Dec 01 19:56:11 crc kubenswrapper[4888]: W1201 19:56:11.196580 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88618608_c3a2_400e_b273_73c81134a515.slice/crio-eaf9f1cc40865ff66bb3192e441ef795ed124355372859fe6a2dea7fd15b19d1 WatchSource:0}: Error finding container eaf9f1cc40865ff66bb3192e441ef795ed124355372859fe6a2dea7fd15b19d1: Status 404 returned error can't find the container with id eaf9f1cc40865ff66bb3192e441ef795ed124355372859fe6a2dea7fd15b19d1
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.849953 4888 generic.go:334] "Generic (PLEG): container finished" podID="88618608-c3a2-400e-b273-73c81134a515" containerID="e1d0bbcc83aacf5ebeeb091e05a1785fbafc5a6595e917426ecb4f8ea0a6ad97" exitCode=0
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.850005 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" event={"ID":"88618608-c3a2-400e-b273-73c81134a515","Type":"ContainerDied","Data":"e1d0bbcc83aacf5ebeeb091e05a1785fbafc5a6595e917426ecb4f8ea0a6ad97"}
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.850446 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" event={"ID":"88618608-c3a2-400e-b273-73c81134a515","Type":"ContainerStarted","Data":"eaf9f1cc40865ff66bb3192e441ef795ed124355372859fe6a2dea7fd15b19d1"}
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.851819 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c47dabe8-c903-4454-82c5-3c4a28322366","Type":"ContainerStarted","Data":"5463d236238b4f88b9adcfaa5839d43ee2325fe4d61d30edaab44ac2f170753d"}
Dec 01 19:56:11 crc kubenswrapper[4888]: I1201 19:56:11.852713 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff7c4d43-2663-4f78-a40b-8a6dc418c31c","Type":"ContainerStarted","Data":"a9c05ee0c79591dc5cae5341200d02b91be79b8cde492bcf9b150bd4a4dc61de"}
Dec 01 19:56:12 crc kubenswrapper[4888]: I1201 19:56:12.865096 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" event={"ID":"88618608-c3a2-400e-b273-73c81134a515","Type":"ContainerStarted","Data":"0613b49acd66258561474122ca94800491fbb4a6baad9ce80107c0d7ea775715"}
Dec 01 19:56:12 crc kubenswrapper[4888]: I1201 19:56:12.866672 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:12 crc kubenswrapper[4888]: I1201 19:56:12.886150 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" podStartSLOduration=2.88612333 podStartE2EDuration="2.88612333s" podCreationTimestamp="2025-12-01 19:56:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:56:12.884121105 +0000 UTC m=+1372.755151059" watchObservedRunningTime="2025-12-01 19:56:12.88612333 +0000 UTC m=+1372.757153254"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.588803 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.648899 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"]
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.649224 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="dnsmasq-dns" containerID="cri-o://c5f9d62494b09a9b8307e38b2810158fcc13d06e285f2a6b913d1f7fd4abcbcb" gracePeriod=10
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.793345 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55478c4467-jjqkn"]
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.795454 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.822409 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-jjqkn"]
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929230 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929309 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w27zg\" (UniqueName: \"kubernetes.io/projected/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-kube-api-access-w27zg\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929370 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-config\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929401 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929417 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929463 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.929515 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-svc\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.945555 4888 generic.go:334] "Generic (PLEG): container finished" podID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerID="c5f9d62494b09a9b8307e38b2810158fcc13d06e285f2a6b913d1f7fd4abcbcb" exitCode=0
Dec 01 19:56:20 crc kubenswrapper[4888]: I1201 19:56:20.945621 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" event={"ID":"06b8160e-f040-4c38-a13a-a5ec612f57ef","Type":"ContainerDied","Data":"c5f9d62494b09a9b8307e38b2810158fcc13d06e285f2a6b913d1f7fd4abcbcb"}
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.033774 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-svc\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.033871 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.033951 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w27zg\" (UniqueName: \"kubernetes.io/projected/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-kube-api-access-w27zg\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.034000 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-config\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.034040 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.034070 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.034147 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.034957 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-svc\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.038057 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.038254 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.038364 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-config\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.044933 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.045538 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.062076 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w27zg\" (UniqueName: \"kubernetes.io/projected/0366eee1-e2f6-4c97-a1e2-ed1e374e2021-kube-api-access-w27zg\") pod \"dnsmasq-dns-55478c4467-jjqkn\" (UID: \"0366eee1-e2f6-4c97-a1e2-ed1e374e2021\") " pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.158157 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.305665 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.442964 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.443830 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lp8z\" (UniqueName: \"kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.443881 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.443991 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.444026 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.444255 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config\") pod \"06b8160e-f040-4c38-a13a-a5ec612f57ef\" (UID: \"06b8160e-f040-4c38-a13a-a5ec612f57ef\") "
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.450935 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z" (OuterVolumeSpecName: "kube-api-access-9lp8z") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "kube-api-access-9lp8z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.501038 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config" (OuterVolumeSpecName: "config") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.504571 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.511020 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.514683 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.518832 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "06b8160e-f040-4c38-a13a-a5ec612f57ef" (UID: "06b8160e-f040-4c38-a13a-a5ec612f57ef"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547058 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-config\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547099 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547111 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lp8z\" (UniqueName: \"kubernetes.io/projected/06b8160e-f040-4c38-a13a-a5ec612f57ef-kube-api-access-9lp8z\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547121 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547131 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.547140 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06b8160e-f040-4c38-a13a-a5ec612f57ef-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.645782 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-jjqkn"]
Dec 01 19:56:21 crc kubenswrapper[4888]: W1201 19:56:21.650485 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0366eee1_e2f6_4c97_a1e2_ed1e374e2021.slice/crio-32d2ff79cb031da8a712805603b9f856fd4f475da0e1c9857e0da7592ffa4008 WatchSource:0}: Error finding container 32d2ff79cb031da8a712805603b9f856fd4f475da0e1c9857e0da7592ffa4008: Status 404 returned error can't find the container with id 32d2ff79cb031da8a712805603b9f856fd4f475da0e1c9857e0da7592ffa4008
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.957996 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" event={"ID":"06b8160e-f040-4c38-a13a-a5ec612f57ef","Type":"ContainerDied","Data":"b20a021394e711aee9e9770d7209a77a69718ebd014c42127c6c9092e1f50fc5"}
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.958050 4888 scope.go:117] "RemoveContainer" containerID="c5f9d62494b09a9b8307e38b2810158fcc13d06e285f2a6b913d1f7fd4abcbcb"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.958194 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg"
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.962972 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-jjqkn" event={"ID":"0366eee1-e2f6-4c97-a1e2-ed1e374e2021","Type":"ContainerStarted","Data":"32d2ff79cb031da8a712805603b9f856fd4f475da0e1c9857e0da7592ffa4008"}
Dec 01 19:56:21 crc kubenswrapper[4888]: I1201 19:56:21.993371 4888 scope.go:117] "RemoveContainer" containerID="e16814a1eaad7189834808f45f90b3c468825f4aeaa6dff2e401497faa49d023"
Dec 01 19:56:22 crc kubenswrapper[4888]: I1201 19:56:22.022246 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"]
Dec 01 19:56:22 crc kubenswrapper[4888]: I1201 19:56:22.036257 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-r9fhg"]
Dec 01 19:56:22 crc kubenswrapper[4888]: I1201 19:56:22.462950 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" path="/var/lib/kubelet/pods/06b8160e-f040-4c38-a13a-a5ec612f57ef/volumes"
Dec 01 19:56:22 crc kubenswrapper[4888]: I1201 19:56:22.996267 4888 generic.go:334] "Generic (PLEG): container finished" podID="0366eee1-e2f6-4c97-a1e2-ed1e374e2021" containerID="2b402531a1c92d905f35a6368c940131aa106803cc457e6bc23891ad93d68e57" exitCode=0
Dec 01 19:56:22 crc kubenswrapper[4888]: I1201 19:56:22.996375 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-jjqkn" event={"ID":"0366eee1-e2f6-4c97-a1e2-ed1e374e2021","Type":"ContainerDied","Data":"2b402531a1c92d905f35a6368c940131aa106803cc457e6bc23891ad93d68e57"}
Dec 01 19:56:24 crc kubenswrapper[4888]: I1201 19:56:24.011713 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-jjqkn" event={"ID":"0366eee1-e2f6-4c97-a1e2-ed1e374e2021","Type":"ContainerStarted","Data":"5f72afb6065e0d6f2d30390945927ba00a4a483f4eb249ad7524008a5391ee51"}
Dec 01 19:56:24 crc kubenswrapper[4888]: I1201 19:56:24.012321 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55478c4467-jjqkn"
Dec 01 19:56:24 crc kubenswrapper[4888]: I1201 19:56:24.038231 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55478c4467-jjqkn" podStartSLOduration=4.038206703 podStartE2EDuration="4.038206703s" podCreationTimestamp="2025-12-01 19:56:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:56:24.031710234 +0000 UTC m=+1383.902740148" watchObservedRunningTime="2025-12-01 19:56:24.038206703 +0000 UTC
m=+1383.909236617" Dec 01 19:56:26 crc kubenswrapper[4888]: I1201 19:56:26.162604 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-89c5cd4d5-r9fhg" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.198:5353: i/o timeout" Dec 01 19:56:31 crc kubenswrapper[4888]: I1201 19:56:31.161434 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55478c4467-jjqkn" Dec 01 19:56:31 crc kubenswrapper[4888]: I1201 19:56:31.240322 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"] Dec 01 19:56:31 crc kubenswrapper[4888]: I1201 19:56:31.240760 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="dnsmasq-dns" containerID="cri-o://0613b49acd66258561474122ca94800491fbb4a6baad9ce80107c0d7ea775715" gracePeriod=10 Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.093668 4888 generic.go:334] "Generic (PLEG): container finished" podID="88618608-c3a2-400e-b273-73c81134a515" containerID="0613b49acd66258561474122ca94800491fbb4a6baad9ce80107c0d7ea775715" exitCode=0 Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.093752 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" event={"ID":"88618608-c3a2-400e-b273-73c81134a515","Type":"ContainerDied","Data":"0613b49acd66258561474122ca94800491fbb4a6baad9ce80107c0d7ea775715"} Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.271142 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320503 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320556 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320604 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320635 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320734 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 
01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320845 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.320897 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmcc5\" (UniqueName: \"kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5\") pod \"88618608-c3a2-400e-b273-73c81134a515\" (UID: \"88618608-c3a2-400e-b273-73c81134a515\") " Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.344857 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5" (OuterVolumeSpecName: "kube-api-access-lmcc5") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "kube-api-access-lmcc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.392898 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.402635 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.405104 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.413213 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.419558 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.420463 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config" (OuterVolumeSpecName: "config") pod "88618608-c3a2-400e-b273-73c81134a515" (UID: "88618608-c3a2-400e-b273-73c81134a515"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425673 4888 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425786 4888 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425842 4888 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-config\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425858 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425874 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425889 4888 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/88618608-c3a2-400e-b273-73c81134a515-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:32 crc kubenswrapper[4888]: I1201 19:56:32.425909 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmcc5\" (UniqueName: \"kubernetes.io/projected/88618608-c3a2-400e-b273-73c81134a515-kube-api-access-lmcc5\") on node \"crc\" DevicePath \"\"" Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.108765 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" event={"ID":"88618608-c3a2-400e-b273-73c81134a515","Type":"ContainerDied","Data":"eaf9f1cc40865ff66bb3192e441ef795ed124355372859fe6a2dea7fd15b19d1"} Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.108836 4888 scope.go:117] "RemoveContainer" containerID="0613b49acd66258561474122ca94800491fbb4a6baad9ce80107c0d7ea775715" Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.109237 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-bq4fq" Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.139578 4888 scope.go:117] "RemoveContainer" containerID="e1d0bbcc83aacf5ebeeb091e05a1785fbafc5a6595e917426ecb4f8ea0a6ad97" Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.144868 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"] Dec 01 19:56:33 crc kubenswrapper[4888]: I1201 19:56:33.157934 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-bq4fq"] Dec 01 19:56:34 crc kubenswrapper[4888]: I1201 19:56:34.464429 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88618608-c3a2-400e-b273-73c81134a515" path="/var/lib/kubelet/pods/88618608-c3a2-400e-b273-73c81134a515/volumes" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.229697 4888 generic.go:334] "Generic (PLEG): container finished" podID="c47dabe8-c903-4454-82c5-3c4a28322366" containerID="5463d236238b4f88b9adcfaa5839d43ee2325fe4d61d30edaab44ac2f170753d" exitCode=0 Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.229810 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c47dabe8-c903-4454-82c5-3c4a28322366","Type":"ContainerDied","Data":"5463d236238b4f88b9adcfaa5839d43ee2325fe4d61d30edaab44ac2f170753d"} Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.234431 4888 generic.go:334] "Generic (PLEG): container finished" podID="ff7c4d43-2663-4f78-a40b-8a6dc418c31c" containerID="a9c05ee0c79591dc5cae5341200d02b91be79b8cde492bcf9b150bd4a4dc61de" exitCode=0 Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.234510 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff7c4d43-2663-4f78-a40b-8a6dc418c31c","Type":"ContainerDied","Data":"a9c05ee0c79591dc5cae5341200d02b91be79b8cde492bcf9b150bd4a4dc61de"} Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.236602 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx"] Dec 01 19:56:44 crc kubenswrapper[4888]: E1201 19:56:44.237142 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237168 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: E1201 19:56:44.237222 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="init" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237231 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="init" Dec 01 19:56:44 crc kubenswrapper[4888]: E1201 19:56:44.237246 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237255 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: E1201 19:56:44.237274 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="init" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237281 4888 
state_mem.go:107] "Deleted CPUSet assignment" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="init" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237503 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="06b8160e-f040-4c38-a13a-a5ec612f57ef" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.237531 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="88618608-c3a2-400e-b273-73c81134a515" containerName="dnsmasq-dns" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.238484 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.242855 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.242888 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.243074 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.243198 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.252734 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx"] Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.294380 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.294860 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvlq6\" (UniqueName: \"kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.294948 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.295000 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.397407 4888 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.397512 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvlq6\" (UniqueName: \"kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.397573 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.397616 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.401962 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.402085 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.404900 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.415575 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvlq6\" (UniqueName: \"kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:44 crc kubenswrapper[4888]: I1201 19:56:44.513168 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.041846 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx"] Dec 01 19:56:45 crc kubenswrapper[4888]: W1201 19:56:45.043958 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e6c49f3_b69a_4381_b5d6_4a66e283d49f.slice/crio-316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979 WatchSource:0}: Error finding container 316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979: Status 404 returned error can't find the container with id 316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979 Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.244008 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c47dabe8-c903-4454-82c5-3c4a28322366","Type":"ContainerStarted","Data":"d1d92b519adae9cca1287cf8e96b707d8cb122bba758cc0b9d34627fc3e9b066"} Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.244208 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.245495 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" event={"ID":"7e6c49f3-b69a-4381-b5d6-4a66e283d49f","Type":"ContainerStarted","Data":"316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979"} Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.247396 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"ff7c4d43-2663-4f78-a40b-8a6dc418c31c","Type":"ContainerStarted","Data":"400b7638615135dc1d041a9348facdf61149b55ab08fe0ee51c2349405bdf173"} Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.247609 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.265337 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.265313732 podStartE2EDuration="37.265313732s" podCreationTimestamp="2025-12-01 19:56:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:56:45.262584817 +0000 UTC m=+1405.133614731" watchObservedRunningTime="2025-12-01 19:56:45.265313732 +0000 UTC m=+1405.136343646" Dec 01 19:56:45 crc kubenswrapper[4888]: I1201 19:56:45.298252 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.29822287 podStartE2EDuration="37.29822287s" podCreationTimestamp="2025-12-01 19:56:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 19:56:45.292472901 +0000 UTC m=+1405.163502815" watchObservedRunningTime="2025-12-01 19:56:45.29822287 +0000 UTC m=+1405.169252804" Dec 01 19:56:56 crc kubenswrapper[4888]: I1201 19:56:56.368655 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" 
event={"ID":"7e6c49f3-b69a-4381-b5d6-4a66e283d49f","Type":"ContainerStarted","Data":"05608de38916891f4bc236d6e332325d411f12f7116b2978490c31e560a62746"} Dec 01 19:56:56 crc kubenswrapper[4888]: I1201 19:56:56.391815 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" podStartSLOduration=1.457896403 podStartE2EDuration="12.391789879s" podCreationTimestamp="2025-12-01 19:56:44 +0000 UTC" firstStartedPulling="2025-12-01 19:56:45.046391196 +0000 UTC m=+1404.917421110" lastFinishedPulling="2025-12-01 19:56:55.980284672 +0000 UTC m=+1415.851314586" observedRunningTime="2025-12-01 19:56:56.383504351 +0000 UTC m=+1416.254534265" watchObservedRunningTime="2025-12-01 19:56:56.391789879 +0000 UTC m=+1416.262819803" Dec 01 19:56:58 crc kubenswrapper[4888]: I1201 19:56:58.732586 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 19:56:59 crc kubenswrapper[4888]: I1201 19:56:59.265371 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 19:57:02 crc kubenswrapper[4888]: I1201 19:57:02.831884 4888 scope.go:117] "RemoveContainer" containerID="4c25f404b76e186a32c13d1499de66aa2c6f1d7a100c83209d0f02104263a489" Dec 01 19:57:02 crc kubenswrapper[4888]: I1201 19:57:02.858504 4888 scope.go:117] "RemoveContainer" containerID="bf07f4a5bd735f3e794efa252bb5f438c65b24df2816ae05849a365ea9ae7435" Dec 01 19:57:07 crc kubenswrapper[4888]: I1201 19:57:07.465125 4888 generic.go:334] "Generic (PLEG): container finished" podID="7e6c49f3-b69a-4381-b5d6-4a66e283d49f" containerID="05608de38916891f4bc236d6e332325d411f12f7116b2978490c31e560a62746" exitCode=0 Dec 01 19:57:07 crc kubenswrapper[4888]: I1201 19:57:07.465212 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" event={"ID":"7e6c49f3-b69a-4381-b5d6-4a66e283d49f","Type":"ContainerDied","Data":"05608de38916891f4bc236d6e332325d411f12f7116b2978490c31e560a62746"} Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.834772 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.978504 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle\") pod \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.978707 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory\") pod \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.978874 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key\") pod \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.978923 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvlq6\" (UniqueName: \"kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6\") pod \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\" (UID: \"7e6c49f3-b69a-4381-b5d6-4a66e283d49f\") " Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.984654 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "7e6c49f3-b69a-4381-b5d6-4a66e283d49f" (UID: "7e6c49f3-b69a-4381-b5d6-4a66e283d49f"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:57:08 crc kubenswrapper[4888]: I1201 19:57:08.986442 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6" (OuterVolumeSpecName: "kube-api-access-wvlq6") pod "7e6c49f3-b69a-4381-b5d6-4a66e283d49f" (UID: "7e6c49f3-b69a-4381-b5d6-4a66e283d49f"). InnerVolumeSpecName "kube-api-access-wvlq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.020957 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory" (OuterVolumeSpecName: "inventory") pod "7e6c49f3-b69a-4381-b5d6-4a66e283d49f" (UID: "7e6c49f3-b69a-4381-b5d6-4a66e283d49f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.046476 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7e6c49f3-b69a-4381-b5d6-4a66e283d49f" (UID: "7e6c49f3-b69a-4381-b5d6-4a66e283d49f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.082508 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvlq6\" (UniqueName: \"kubernetes.io/projected/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-kube-api-access-wvlq6\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.082531 4888 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.082542 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.082550 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e6c49f3-b69a-4381-b5d6-4a66e283d49f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.486392 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" event={"ID":"7e6c49f3-b69a-4381-b5d6-4a66e283d49f","Type":"ContainerDied","Data":"316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979"} Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.486663 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="316ef88415a7247583e18d6bb943fdb9993f5fc75d543e0057537e2dbb974979" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.486678 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.577688 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn"] Dec 01 19:57:09 crc kubenswrapper[4888]: E1201 19:57:09.578585 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6c49f3-b69a-4381-b5d6-4a66e283d49f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.578703 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6c49f3-b69a-4381-b5d6-4a66e283d49f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.579082 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6c49f3-b69a-4381-b5d6-4a66e283d49f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.580054 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.582869 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.583205 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.583683 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.583689 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.588798 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn"] Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.693574 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6v68\" (UniqueName: \"kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.693728 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.693790 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.795829 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.796234 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.796323 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6v68\" (UniqueName: \"kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.800548 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.808492 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.811545 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6v68\" (UniqueName: \"kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-9qqjn\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:09 crc kubenswrapper[4888]: I1201 19:57:09.901876 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:10 crc kubenswrapper[4888]: I1201 19:57:10.423904 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn"] Dec 01 19:57:10 crc kubenswrapper[4888]: I1201 19:57:10.498816 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" event={"ID":"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c","Type":"ContainerStarted","Data":"cb8d26b3ae1f0dfa7ac7b9069b83bc37cab2487d0001b5ddbcb51c2a64cb6db7"} Dec 01 19:57:11 crc kubenswrapper[4888]: I1201 19:57:11.513548 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" event={"ID":"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c","Type":"ContainerStarted","Data":"e8bf911122d1767f19ab7c6412ae137f65c62fa0e55988c7d5ef905d0e3b1a90"} Dec 01 19:57:11 crc kubenswrapper[4888]: I1201 19:57:11.534499 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" podStartSLOduration=1.8691324329999999 podStartE2EDuration="2.534477482s" podCreationTimestamp="2025-12-01 19:57:09 +0000 UTC" firstStartedPulling="2025-12-01 19:57:10.427557945 +0000 UTC m=+1430.298587859" lastFinishedPulling="2025-12-01 19:57:11.092902994 +0000 UTC m=+1430.963932908" observedRunningTime="2025-12-01 19:57:11.530118368 +0000 UTC m=+1431.401148292" watchObservedRunningTime="2025-12-01 19:57:11.534477482 +0000 UTC m=+1431.405507396" Dec 01 19:57:14 crc kubenswrapper[4888]: I1201 19:57:14.543525 4888 generic.go:334] "Generic (PLEG): container finished" podID="44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" containerID="e8bf911122d1767f19ab7c6412ae137f65c62fa0e55988c7d5ef905d0e3b1a90" exitCode=0 Dec 01 19:57:14 crc kubenswrapper[4888]: I1201 19:57:14.543613 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" 
event={"ID":"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c","Type":"ContainerDied","Data":"e8bf911122d1767f19ab7c6412ae137f65c62fa0e55988c7d5ef905d0e3b1a90"} Dec 01 19:57:15 crc kubenswrapper[4888]: I1201 19:57:15.991689 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.115956 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key\") pod \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.116090 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory\") pod \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.116227 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6v68\" (UniqueName: \"kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68\") pod \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\" (UID: \"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c\") " Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.121549 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68" (OuterVolumeSpecName: "kube-api-access-z6v68") pod "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" (UID: "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c"). InnerVolumeSpecName "kube-api-access-z6v68". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.143655 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" (UID: "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.144109 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory" (OuterVolumeSpecName: "inventory") pod "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" (UID: "44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.218605 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.218646 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.218656 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6v68\" (UniqueName: \"kubernetes.io/projected/44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c-kube-api-access-z6v68\") on node \"crc\" DevicePath \"\"" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.562463 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" event={"ID":"44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c","Type":"ContainerDied","Data":"cb8d26b3ae1f0dfa7ac7b9069b83bc37cab2487d0001b5ddbcb51c2a64cb6db7"} Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.562772 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb8d26b3ae1f0dfa7ac7b9069b83bc37cab2487d0001b5ddbcb51c2a64cb6db7" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.562841 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-9qqjn" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.628146 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"] Dec 01 19:57:16 crc kubenswrapper[4888]: E1201 19:57:16.628589 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.628609 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.628812 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.629460 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.635475 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.636077 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.636423 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.637830 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.639550 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"]
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.728289 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.728447 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8qlx\" (UniqueName: \"kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.728502 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.728579 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.830377 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.831112 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.831208 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.831564 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8qlx\" (UniqueName: \"kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.834536 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.835253 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.837510 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.851986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8qlx\" (UniqueName: \"kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:16 crc kubenswrapper[4888]: I1201 19:57:16.951968 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"
Dec 01 19:57:17 crc kubenswrapper[4888]: I1201 19:57:17.504692 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx"]
Dec 01 19:57:17 crc kubenswrapper[4888]: W1201 19:57:17.509898 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04974f6b_2545_433f_907d_5f97024057d4.slice/crio-9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b WatchSource:0}: Error finding container 9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b: Status 404 returned error can't find the container with id 9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b
Dec 01 19:57:17 crc kubenswrapper[4888]: I1201 19:57:17.573966 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" event={"ID":"04974f6b-2545-433f-907d-5f97024057d4","Type":"ContainerStarted","Data":"9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b"}
Dec 01 19:57:18 crc kubenswrapper[4888]: I1201 19:57:18.591712 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" event={"ID":"04974f6b-2545-433f-907d-5f97024057d4","Type":"ContainerStarted","Data":"7d60fc6a06098d78d5924dc37653bbccdea544b67ee9ac9ee8f563d5ec0b3d2b"}
Dec 01 19:57:18 crc kubenswrapper[4888]: I1201 19:57:18.610445 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" podStartSLOduration=2.114289979 podStartE2EDuration="2.61042567s" podCreationTimestamp="2025-12-01 19:57:16 +0000 UTC" firstStartedPulling="2025-12-01 19:57:17.513046466 +0000 UTC m=+1437.384076380" lastFinishedPulling="2025-12-01 19:57:18.009182167 +0000 UTC m=+1437.880212071" observedRunningTime="2025-12-01 19:57:18.608862035 +0000 UTC m=+1438.479891949" watchObservedRunningTime="2025-12-01 19:57:18.61042567 +0000 UTC m=+1438.481455574"
Dec 01 19:57:20 crc kubenswrapper[4888]: I1201 19:57:20.037622 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:57:20 crc kubenswrapper[4888]: I1201 19:57:20.037697 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.082259 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.091650 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.119299 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.132792 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.132854 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx2wf\" (UniqueName: \"kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.132876 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.234670 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.235097 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.235138 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx2wf\" (UniqueName: \"kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.235395 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.235559 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.266252 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx2wf\" (UniqueName: \"kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf\") pod \"redhat-operators-7j24m\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") " pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.419785 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:22 crc kubenswrapper[4888]: I1201 19:57:22.902086 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:23 crc kubenswrapper[4888]: I1201 19:57:23.644651 4888 generic.go:334] "Generic (PLEG): container finished" podID="519f4ced-c308-4504-92d3-bd7330053302" containerID="c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab" exitCode=0
Dec 01 19:57:23 crc kubenswrapper[4888]: I1201 19:57:23.644869 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerDied","Data":"c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab"}
Dec 01 19:57:23 crc kubenswrapper[4888]: I1201 19:57:23.646245 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerStarted","Data":"ea4dc0270a37813a7c652c6a2d39628a98ad1a6b98f59e8064d1bcc38c9c3b2a"}
Dec 01 19:57:25 crc kubenswrapper[4888]: I1201 19:57:25.666400 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerStarted","Data":"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"}
Dec 01 19:57:27 crc kubenswrapper[4888]: I1201 19:57:27.683906 4888 generic.go:334] "Generic (PLEG): container finished" podID="519f4ced-c308-4504-92d3-bd7330053302" containerID="8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804" exitCode=0
Dec 01 19:57:27 crc kubenswrapper[4888]: I1201 19:57:27.684209 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerDied","Data":"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"}
Dec 01 19:57:28 crc kubenswrapper[4888]: I1201 19:57:28.695717 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerStarted","Data":"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"}
Dec 01 19:57:28 crc kubenswrapper[4888]: I1201 19:57:28.711560 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7j24m" podStartSLOduration=1.990136209 podStartE2EDuration="6.71154259s" podCreationTimestamp="2025-12-01 19:57:22 +0000 UTC" firstStartedPulling="2025-12-01 19:57:23.647105493 +0000 UTC m=+1443.518135397" lastFinishedPulling="2025-12-01 19:57:28.368511864 +0000 UTC m=+1448.239541778" observedRunningTime="2025-12-01 19:57:28.711367915 +0000 UTC m=+1448.582397829" watchObservedRunningTime="2025-12-01 19:57:28.71154259 +0000 UTC m=+1448.582572504"
Dec 01 19:57:32 crc kubenswrapper[4888]: I1201 19:57:32.420847 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:32 crc kubenswrapper[4888]: I1201 19:57:32.421405 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:33 crc kubenswrapper[4888]: I1201 19:57:33.469250 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7j24m" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="registry-server" probeResult="failure" output=<
Dec 01 19:57:33 crc kubenswrapper[4888]: timeout: failed to connect service ":50051" within 1s
Dec 01 19:57:33 crc kubenswrapper[4888]: >
Dec 01 19:57:42 crc kubenswrapper[4888]: I1201 19:57:42.480548 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:42 crc kubenswrapper[4888]: I1201 19:57:42.528089 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:42 crc kubenswrapper[4888]: I1201 19:57:42.731035 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:43 crc kubenswrapper[4888]: I1201 19:57:43.853945 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7j24m" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="registry-server" containerID="cri-o://c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34" gracePeriod=2
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.343677 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.522478 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content\") pod \"519f4ced-c308-4504-92d3-bd7330053302\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") "
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.522948 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities\") pod \"519f4ced-c308-4504-92d3-bd7330053302\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") "
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.523075 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx2wf\" (UniqueName: \"kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf\") pod \"519f4ced-c308-4504-92d3-bd7330053302\" (UID: \"519f4ced-c308-4504-92d3-bd7330053302\") "
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.524115 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities" (OuterVolumeSpecName: "utilities") pod "519f4ced-c308-4504-92d3-bd7330053302" (UID: "519f4ced-c308-4504-92d3-bd7330053302"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.524471 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.535427 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf" (OuterVolumeSpecName: "kube-api-access-vx2wf") pod "519f4ced-c308-4504-92d3-bd7330053302" (UID: "519f4ced-c308-4504-92d3-bd7330053302"). InnerVolumeSpecName "kube-api-access-vx2wf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.624555 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "519f4ced-c308-4504-92d3-bd7330053302" (UID: "519f4ced-c308-4504-92d3-bd7330053302"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.626626 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/519f4ced-c308-4504-92d3-bd7330053302-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.626671 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx2wf\" (UniqueName: \"kubernetes.io/projected/519f4ced-c308-4504-92d3-bd7330053302-kube-api-access-vx2wf\") on node \"crc\" DevicePath \"\""
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.863084 4888 generic.go:334] "Generic (PLEG): container finished" podID="519f4ced-c308-4504-92d3-bd7330053302" containerID="c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34" exitCode=0
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.863133 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerDied","Data":"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"}
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.863161 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7j24m" event={"ID":"519f4ced-c308-4504-92d3-bd7330053302","Type":"ContainerDied","Data":"ea4dc0270a37813a7c652c6a2d39628a98ad1a6b98f59e8064d1bcc38c9c3b2a"}
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.863179 4888 scope.go:117] "RemoveContainer" containerID="c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.863334 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7j24m"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.894688 4888 scope.go:117] "RemoveContainer" containerID="8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.905208 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.919693 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7j24m"]
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.925827 4888 scope.go:117] "RemoveContainer" containerID="c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.969796 4888 scope.go:117] "RemoveContainer" containerID="c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"
Dec 01 19:57:44 crc kubenswrapper[4888]: E1201 19:57:44.970425 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34\": container with ID starting with c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34 not found: ID does not exist" containerID="c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.970483 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34"} err="failed to get container status \"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34\": rpc error: code = NotFound desc = could not find container \"c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34\": container with ID starting with c4d2f515493f5b6a07a089592dcbdff460610662dab52c1934353143a109bd34 not found: ID does not exist"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.970531 4888 scope.go:117] "RemoveContainer" containerID="8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"
Dec 01 19:57:44 crc kubenswrapper[4888]: E1201 19:57:44.970996 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804\": container with ID starting with 8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804 not found: ID does not exist" containerID="8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.971023 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804"} err="failed to get container status \"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804\": rpc error: code = NotFound desc = could not find container \"8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804\": container with ID starting with 8d6311aa8b6376ac1aff38b5a3847965d968d14ea1887490887080333fac2804 not found: ID does not exist"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.971040 4888 scope.go:117] "RemoveContainer" containerID="c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab"
Dec 01 19:57:44 crc kubenswrapper[4888]: E1201 19:57:44.971352 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab\": container with ID starting with c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab not found: ID does not exist" containerID="c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab"
Dec 01 19:57:44 crc kubenswrapper[4888]: I1201 19:57:44.971381 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab"} err="failed to get container status \"c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab\": rpc error: code = NotFound desc = could not find container \"c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab\": container with ID starting with c5a8cb4aec82cbbe0054880c438f8268ec5c1f36dbc146f0645f16272ef507ab not found: ID does not exist"
Dec 01 19:57:46 crc kubenswrapper[4888]: I1201 19:57:46.462620 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519f4ced-c308-4504-92d3-bd7330053302" path="/var/lib/kubelet/pods/519f4ced-c308-4504-92d3-bd7330053302/volumes"
Dec 01 19:57:50 crc kubenswrapper[4888]: I1201 19:57:50.037705 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:57:50 crc kubenswrapper[4888]: I1201 19:57:50.038428 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.216357 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:57:59 crc kubenswrapper[4888]: E1201 19:57:59.217531 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="registry-server"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.217558 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="registry-server"
Dec 01 19:57:59 crc kubenswrapper[4888]: E1201 19:57:59.217607 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="extract-content"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.217616 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="extract-content"
Dec 01 19:57:59 crc kubenswrapper[4888]: E1201 19:57:59.217629 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="extract-utilities"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.217637 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="extract-utilities"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.217855 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="519f4ced-c308-4504-92d3-bd7330053302" containerName="registry-server"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.219553 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.233600 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.405631 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n9v5\" (UniqueName: \"kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.405692 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.405722 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.507256 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n9v5\" (UniqueName: \"kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.507373 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.507420 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.508128 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.508678 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.533311 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n9v5\" (UniqueName: \"kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5\") pod \"redhat-marketplace-qdxb6\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") " pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:57:59 crc kubenswrapper[4888]: I1201 19:57:59.550155 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:00 crc kubenswrapper[4888]: I1201 19:58:00.017300 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:58:01 crc kubenswrapper[4888]: I1201 19:58:01.007365 4888 generic.go:334] "Generic (PLEG): container finished" podID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerID="0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1" exitCode=0
Dec 01 19:58:01 crc kubenswrapper[4888]: I1201 19:58:01.007425 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerDied","Data":"0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1"}
Dec 01 19:58:01 crc kubenswrapper[4888]: I1201 19:58:01.007665 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerStarted","Data":"48edbe79ce6ec18e75fcfe3ad48f5765b5ae39141a342adba9a1cd3a02d66757"}
Dec 01 19:58:02 crc kubenswrapper[4888]: I1201 19:58:02.022662 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerStarted","Data":"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"}
Dec 01 19:58:03 crc kubenswrapper[4888]: I1201 19:58:03.000607 4888 scope.go:117] "RemoveContainer" containerID="1aa75780a4b1c0ba06164d7973e7b01ac1e1f5db67213e7f2ebc07e2732ae2d7"
Dec 01 19:58:03 crc kubenswrapper[4888]: I1201 19:58:03.040995 4888 scope.go:117] "RemoveContainer" containerID="5cc9313e30870523146105d1e43acd46a476c4390f8804e246937d98c1acc895"
Dec 01 19:58:03 crc kubenswrapper[4888]: I1201 19:58:03.043297 4888 generic.go:334] "Generic (PLEG): container finished" podID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerID="5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231" exitCode=0
Dec 01 19:58:03 crc kubenswrapper[4888]: I1201 19:58:03.043332 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerDied","Data":"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"}
Dec 01 19:58:04 crc kubenswrapper[4888]: I1201 19:58:04.055492 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerStarted","Data":"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"}
Dec 01 19:58:04 crc kubenswrapper[4888]: I1201 19:58:04.081664 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qdxb6" podStartSLOduration=2.59570633 podStartE2EDuration="5.081643348s" podCreationTimestamp="2025-12-01 19:57:59 +0000 UTC" firstStartedPulling="2025-12-01 19:58:01.009238181 +0000 UTC m=+1480.880268095" lastFinishedPulling="2025-12-01 19:58:03.495175199 +0000 UTC m=+1483.366205113" observedRunningTime="2025-12-01 19:58:04.072102904 +0000 UTC m=+1483.943132818" watchObservedRunningTime="2025-12-01 19:58:04.081643348 +0000 UTC m=+1483.952673262"
Dec 01 19:58:09 crc kubenswrapper[4888]: I1201 19:58:09.550636 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:09 crc kubenswrapper[4888]: I1201 19:58:09.551418 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:09 crc kubenswrapper[4888]: I1201 19:58:09.592930 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:10 crc kubenswrapper[4888]: I1201 19:58:10.170036 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:10 crc kubenswrapper[4888]: I1201 19:58:10.216704 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.128823 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qdxb6" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="registry-server" containerID="cri-o://e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d" gracePeriod=2
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.605834 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.785917 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content\") pod \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") "
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.785972 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities\") pod \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") "
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.786248 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n9v5\" (UniqueName: \"kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5\") pod \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\" (UID: \"5cd3b941-2597-4c2e-9b79-d33ed6d89021\") "
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.787031 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities" (OuterVolumeSpecName: "utilities") pod "5cd3b941-2597-4c2e-9b79-d33ed6d89021" (UID: "5cd3b941-2597-4c2e-9b79-d33ed6d89021"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.798599 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5" (OuterVolumeSpecName: "kube-api-access-9n9v5") pod "5cd3b941-2597-4c2e-9b79-d33ed6d89021" (UID: "5cd3b941-2597-4c2e-9b79-d33ed6d89021"). InnerVolumeSpecName "kube-api-access-9n9v5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.815501 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cd3b941-2597-4c2e-9b79-d33ed6d89021" (UID: "5cd3b941-2597-4c2e-9b79-d33ed6d89021"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.888599 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n9v5\" (UniqueName: \"kubernetes.io/projected/5cd3b941-2597-4c2e-9b79-d33ed6d89021-kube-api-access-9n9v5\") on node \"crc\" DevicePath \"\""
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.888641 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 19:58:12 crc kubenswrapper[4888]: I1201 19:58:12.888654 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cd3b941-2597-4c2e-9b79-d33ed6d89021-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.138927 4888 generic.go:334] "Generic (PLEG): container finished" podID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerID="e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d" exitCode=0
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.139151 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qdxb6"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.140134 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerDied","Data":"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"}
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.140259 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qdxb6" event={"ID":"5cd3b941-2597-4c2e-9b79-d33ed6d89021","Type":"ContainerDied","Data":"48edbe79ce6ec18e75fcfe3ad48f5765b5ae39141a342adba9a1cd3a02d66757"}
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.140305 4888 scope.go:117] "RemoveContainer" containerID="e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.170462 4888 scope.go:117] "RemoveContainer" containerID="5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.195457 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.204955 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qdxb6"]
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.206300 4888 scope.go:117] "RemoveContainer" containerID="0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.257370 4888 scope.go:117] "RemoveContainer" containerID="e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"
Dec 01 19:58:13 crc kubenswrapper[4888]: E1201 19:58:13.258736 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d\": container with ID starting with e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d not found: ID does not exist" containerID="e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.258804 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d"} err="failed to get container status \"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d\": rpc error: code = NotFound desc = could not find container \"e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d\": container with ID starting with e538911f8c97213a74bd40fa29be8f8ffb96bc79758fe4e506f3f82ec4f3e81d not found: ID does not exist"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.258850 4888 scope.go:117] "RemoveContainer" containerID="5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"
Dec 01 19:58:13 crc kubenswrapper[4888]: E1201 19:58:13.259611 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231\": container with ID starting with 5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231 not found: ID does not exist" containerID="5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.259646 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231"} err="failed to get container status \"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231\": rpc error: code = NotFound desc = could not find container \"5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231\": container with ID starting with 5f875a6718cbb98d4901ab394b9a82808752a42a6efbc77d4aef9a7f82fb1231 not found: ID does not exist"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.259674 4888 scope.go:117] "RemoveContainer" containerID="0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1"
Dec 01 19:58:13 crc kubenswrapper[4888]: E1201 19:58:13.259992 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1\": container with ID starting with 0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1 not found: ID does not exist" containerID="0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1"
Dec 01 19:58:13 crc kubenswrapper[4888]: I1201 19:58:13.260044 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1"} err="failed to get container status \"0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1\": rpc error: code = NotFound desc = could not find container \"0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1\": container with ID starting with 0542b5f0c81557f19d2571892cfd71ac059b55e254bc18905b311d19e8da81a1 not found: ID does not exist"
Dec 01 19:58:14 crc kubenswrapper[4888]: I1201 19:58:14.461469 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" path="/var/lib/kubelet/pods/5cd3b941-2597-4c2e-9b79-d33ed6d89021/volumes"
Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.038097 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.038672 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.038722 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp"
Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.039328 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.039387 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" gracePeriod=600
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" gracePeriod=600 Dec 01 19:58:20 crc kubenswrapper[4888]: E1201 19:58:20.179103 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.204182 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" exitCode=0 Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.204216 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb"} Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.204289 4888 scope.go:117] "RemoveContainer" containerID="d4c2bfaf6462cc0c7520b254a32389ee0079cd1f913a1a1bfa275c5709e64fec" Dec 01 19:58:20 crc kubenswrapper[4888]: I1201 19:58:20.204902 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:58:20 crc kubenswrapper[4888]: E1201 19:58:20.205362 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:58:31 crc kubenswrapper[4888]: I1201 19:58:31.451609 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:58:31 crc kubenswrapper[4888]: E1201 19:58:31.453595 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:58:45 crc kubenswrapper[4888]: I1201 19:58:45.450879 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:58:45 crc kubenswrapper[4888]: E1201 19:58:45.451641 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:58:58 crc 
kubenswrapper[4888]: I1201 19:58:58.451651 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:58:58 crc kubenswrapper[4888]: E1201 19:58:58.452560 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:59:03 crc kubenswrapper[4888]: I1201 19:59:03.178388 4888 scope.go:117] "RemoveContainer" containerID="37038d0d766494b4ca0a65619d56d5d2fea5d8a61981f13a35593ff44612cd62" Dec 01 19:59:03 crc kubenswrapper[4888]: I1201 19:59:03.199639 4888 scope.go:117] "RemoveContainer" containerID="cc690ce78641323b443479779f18a10a264628d0bc41dd7f95d0884cd4ed98f2" Dec 01 19:59:03 crc kubenswrapper[4888]: I1201 19:59:03.220204 4888 scope.go:117] "RemoveContainer" containerID="0fc9a673e6cc372dda4155d983d65d01115b05454f84a11c6bfb6790ee44bde8" Dec 01 19:59:03 crc kubenswrapper[4888]: I1201 19:59:03.246240 4888 scope.go:117] "RemoveContainer" containerID="415fa8458981321241213b9080ca06d00e9bb1d35c31147d0ae1f642d1dfa39c" Dec 01 19:59:03 crc kubenswrapper[4888]: I1201 19:59:03.265310 4888 scope.go:117] "RemoveContainer" containerID="7299a937fe97cbc2ce31bd3aa342161fee45f0a040f1898e5d4644bedb568490" Dec 01 19:59:09 crc kubenswrapper[4888]: I1201 19:59:09.452208 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:59:09 crc kubenswrapper[4888]: E1201 19:59:09.453078 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.081538 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:12 crc kubenswrapper[4888]: E1201 19:59:12.082286 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="registry-server" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.082303 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="registry-server" Dec 01 19:59:12 crc kubenswrapper[4888]: E1201 19:59:12.082354 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="extract-content" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.082362 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="extract-content" Dec 01 19:59:12 crc kubenswrapper[4888]: E1201 19:59:12.082392 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="extract-utilities" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.082401 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" 
containerName="extract-utilities" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.082615 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cd3b941-2597-4c2e-9b79-d33ed6d89021" containerName="registry-server" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.107217 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.113279 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.165529 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.166023 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.166122 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6b57\" (UniqueName: \"kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.268821 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.268903 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.269063 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6b57\" (UniqueName: \"kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.269502 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.269577 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.288894 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6b57\" (UniqueName: \"kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57\") pod \"certified-operators-c9j5k\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.467017 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:12 crc kubenswrapper[4888]: I1201 19:59:12.942806 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:13 crc kubenswrapper[4888]: I1201 19:59:13.902879 4888 generic.go:334] "Generic (PLEG): container finished" podID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerID="d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594" exitCode=0 Dec 01 19:59:13 crc kubenswrapper[4888]: I1201 19:59:13.902986 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerDied","Data":"d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594"} Dec 01 19:59:13 crc kubenswrapper[4888]: I1201 19:59:13.903123 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerStarted","Data":"6196f56657c95229175d95434a9d6938bc36455e9c37560a8fa9d87e69b9a402"} Dec 01 19:59:15 crc kubenswrapper[4888]: I1201 19:59:15.926810 4888 generic.go:334] "Generic (PLEG): container finished" podID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerID="eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c" exitCode=0 Dec 01 19:59:15 crc kubenswrapper[4888]: I1201 19:59:15.926929 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerDied","Data":"eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c"} Dec 01 19:59:17 crc kubenswrapper[4888]: I1201 19:59:17.954734 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerStarted","Data":"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f"} Dec 01 19:59:17 crc kubenswrapper[4888]: I1201 19:59:17.976787 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c9j5k" podStartSLOduration=3.072008464 podStartE2EDuration="5.976757183s" podCreationTimestamp="2025-12-01 19:59:12 +0000 UTC" firstStartedPulling="2025-12-01 19:59:13.904580559 +0000 UTC m=+1553.775610473" lastFinishedPulling="2025-12-01 19:59:16.809329278 +0000 UTC m=+1556.680359192" observedRunningTime="2025-12-01 19:59:17.97032546 +0000 UTC m=+1557.841355394" watchObservedRunningTime="2025-12-01 19:59:17.976757183 +0000 UTC m=+1557.847787097" Dec 01 19:59:21 crc kubenswrapper[4888]: I1201 19:59:21.451163 4888 scope.go:117] 
"RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:59:21 crc kubenswrapper[4888]: E1201 19:59:21.452117 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:59:22 crc kubenswrapper[4888]: I1201 19:59:22.468032 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:22 crc kubenswrapper[4888]: I1201 19:59:22.468471 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:22 crc kubenswrapper[4888]: I1201 19:59:22.513501 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:23 crc kubenswrapper[4888]: I1201 19:59:23.045373 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:23 crc kubenswrapper[4888]: I1201 19:59:23.093344 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.016104 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c9j5k" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="registry-server" containerID="cri-o://8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f" gracePeriod=2 Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.460289 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.546215 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content\") pod \"71ed07d6-bc7f-46c1-92bc-04643023d103\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.546308 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities\") pod \"71ed07d6-bc7f-46c1-92bc-04643023d103\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.546405 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6b57\" (UniqueName: \"kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57\") pod \"71ed07d6-bc7f-46c1-92bc-04643023d103\" (UID: \"71ed07d6-bc7f-46c1-92bc-04643023d103\") " Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.549018 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities" (OuterVolumeSpecName: "utilities") pod "71ed07d6-bc7f-46c1-92bc-04643023d103" (UID: "71ed07d6-bc7f-46c1-92bc-04643023d103"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.555550 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57" (OuterVolumeSpecName: "kube-api-access-z6b57") pod "71ed07d6-bc7f-46c1-92bc-04643023d103" (UID: "71ed07d6-bc7f-46c1-92bc-04643023d103"). InnerVolumeSpecName "kube-api-access-z6b57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.648938 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.648974 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6b57\" (UniqueName: \"kubernetes.io/projected/71ed07d6-bc7f-46c1-92bc-04643023d103-kube-api-access-z6b57\") on node \"crc\" DevicePath \"\"" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.839924 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71ed07d6-bc7f-46c1-92bc-04643023d103" (UID: "71ed07d6-bc7f-46c1-92bc-04643023d103"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 19:59:25 crc kubenswrapper[4888]: I1201 19:59:25.852559 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71ed07d6-bc7f-46c1-92bc-04643023d103-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.027507 4888 generic.go:334] "Generic (PLEG): container finished" podID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerID="8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f" exitCode=0 Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.027552 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerDied","Data":"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f"} Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.027577 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c9j5k" event={"ID":"71ed07d6-bc7f-46c1-92bc-04643023d103","Type":"ContainerDied","Data":"6196f56657c95229175d95434a9d6938bc36455e9c37560a8fa9d87e69b9a402"} Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.027596 4888 scope.go:117] "RemoveContainer" containerID="8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.027598 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c9j5k" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.058131 4888 scope.go:117] "RemoveContainer" containerID="eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.063243 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.071843 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c9j5k"] Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.081068 4888 scope.go:117] "RemoveContainer" containerID="d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.132443 4888 scope.go:117] "RemoveContainer" containerID="8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f" Dec 01 19:59:26 crc kubenswrapper[4888]: E1201 19:59:26.132932 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f\": container with ID starting with 8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f not found: ID does not exist" containerID="8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.132975 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f"} err="failed to get container status \"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f\": rpc error: code = NotFound desc = could not find container \"8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f\": container with ID starting with 8e36aa2064590ee276a5e5ef4b2279f859cb93935aea0ea2e0c444e7d01dfc8f not found: ID does not exist" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.133007 4888 scope.go:117] "RemoveContainer" containerID="eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c" Dec 01 19:59:26 crc kubenswrapper[4888]: E1201 19:59:26.133443 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c\": container with ID starting with eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c not found: ID does not exist" containerID="eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.133481 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c"} err="failed to get container status \"eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c\": rpc error: code = NotFound desc = could not find container \"eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c\": container with ID starting with eca2e19aae3e80e333ed0a3566de7f2015b4624bbda0fa5f11c6bc10aa5b458c not found: ID does not exist" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.133502 4888 scope.go:117] "RemoveContainer" containerID="d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594" Dec 01 19:59:26 crc kubenswrapper[4888]: E1201 19:59:26.133855 4888 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594\": container with ID starting with d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594 not found: ID does not exist" containerID="d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.133909 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594"} err="failed to get container status \"d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594\": rpc error: code = NotFound desc = could not find container \"d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594\": container with ID starting with d4a33380621e0b81604031dabe58cd1cf40518a59971e500c927891fcee48594 not found: ID does not exist" Dec 01 19:59:26 crc kubenswrapper[4888]: I1201 19:59:26.463468 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" path="/var/lib/kubelet/pods/71ed07d6-bc7f-46c1-92bc-04643023d103/volumes" Dec 01 19:59:34 crc kubenswrapper[4888]: I1201 19:59:34.452595 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:59:34 crc kubenswrapper[4888]: E1201 19:59:34.453526 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 19:59:49 crc kubenswrapper[4888]: I1201 19:59:49.451408 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 19:59:49 crc kubenswrapper[4888]: E1201 19:59:49.452177 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.146004 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk"] Dec 01 20:00:00 crc kubenswrapper[4888]: E1201 20:00:00.147036 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="extract-utilities" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.147055 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="extract-utilities" Dec 01 20:00:00 crc kubenswrapper[4888]: E1201 20:00:00.147081 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="extract-content" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.147087 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="extract-content" Dec 01 20:00:00 crc 
kubenswrapper[4888]: E1201 20:00:00.147117 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="registry-server" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.147124 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="registry-server" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.147363 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="71ed07d6-bc7f-46c1-92bc-04643023d103" containerName="registry-server" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.148158 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.150772 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.150772 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.159012 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk"] Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.335214 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.335333 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrntv\" (UniqueName: \"kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.335405 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.437388 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.437514 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrntv\" (UniqueName: \"kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.437595 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.438453 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.445471 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.457626 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrntv\" (UniqueName: \"kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv\") pod \"collect-profiles-29410320-qcfpk\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.479704 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:00 crc kubenswrapper[4888]: I1201 20:00:00.936612 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk"] Dec 01 20:00:01 crc kubenswrapper[4888]: I1201 20:00:01.350921 4888 generic.go:334] "Generic (PLEG): container finished" podID="690f6f48-b606-4db9-b569-3bfbec27a013" containerID="ae17c5455f1b9402ed9930ae1cf7b00fbeeaea29a27755011351dd4af56719ff" exitCode=0 Dec 01 20:00:01 crc kubenswrapper[4888]: I1201 20:00:01.351040 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" event={"ID":"690f6f48-b606-4db9-b569-3bfbec27a013","Type":"ContainerDied","Data":"ae17c5455f1b9402ed9930ae1cf7b00fbeeaea29a27755011351dd4af56719ff"} Dec 01 20:00:01 crc kubenswrapper[4888]: I1201 20:00:01.351355 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" event={"ID":"690f6f48-b606-4db9-b569-3bfbec27a013","Type":"ContainerStarted","Data":"8ced8b176c217567ae189da4f52e69456913a9aae77bbeea2469daccff284477"} Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.453064 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:00:02 crc kubenswrapper[4888]: E1201 20:00:02.453402 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.680316 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.783852 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume\") pod \"690f6f48-b606-4db9-b569-3bfbec27a013\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.784027 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume\") pod \"690f6f48-b606-4db9-b569-3bfbec27a013\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.784905 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrntv\" (UniqueName: \"kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv\") pod \"690f6f48-b606-4db9-b569-3bfbec27a013\" (UID: \"690f6f48-b606-4db9-b569-3bfbec27a013\") " Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.784890 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume" (OuterVolumeSpecName: "config-volume") pod "690f6f48-b606-4db9-b569-3bfbec27a013" (UID: "690f6f48-b606-4db9-b569-3bfbec27a013"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.785415 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/690f6f48-b606-4db9-b569-3bfbec27a013-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.790175 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "690f6f48-b606-4db9-b569-3bfbec27a013" (UID: "690f6f48-b606-4db9-b569-3bfbec27a013"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.790366 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv" (OuterVolumeSpecName: "kube-api-access-jrntv") pod "690f6f48-b606-4db9-b569-3bfbec27a013" (UID: "690f6f48-b606-4db9-b569-3bfbec27a013"). InnerVolumeSpecName "kube-api-access-jrntv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.887122 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrntv\" (UniqueName: \"kubernetes.io/projected/690f6f48-b606-4db9-b569-3bfbec27a013-kube-api-access-jrntv\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:02 crc kubenswrapper[4888]: I1201 20:00:02.887166 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/690f6f48-b606-4db9-b569-3bfbec27a013-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:00:03 crc kubenswrapper[4888]: I1201 20:00:03.335150 4888 scope.go:117] "RemoveContainer" containerID="53a33fe78c3c4d3fc20837f0e399a847af8b566e36f032513d2db9c50fbf7a0b" Dec 01 20:00:03 crc kubenswrapper[4888]: I1201 20:00:03.357239 4888 scope.go:117] "RemoveContainer" containerID="de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f" Dec 01 20:00:03 crc kubenswrapper[4888]: I1201 20:00:03.376808 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" Dec 01 20:00:03 crc kubenswrapper[4888]: I1201 20:00:03.376804 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk" event={"ID":"690f6f48-b606-4db9-b569-3bfbec27a013","Type":"ContainerDied","Data":"8ced8b176c217567ae189da4f52e69456913a9aae77bbeea2469daccff284477"} Dec 01 20:00:03 crc kubenswrapper[4888]: I1201 20:00:03.376996 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ced8b176c217567ae189da4f52e69456913a9aae77bbeea2469daccff284477" Dec 01 20:00:03 crc kubenswrapper[4888]: E1201 20:00:03.379818 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f\": container with ID starting with de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f not found: ID does not exist" containerID="de93e7583d3223706ccd4e2841c3a828d137c0009e63e1fc871875712070b36f" Dec 01 20:00:13 crc kubenswrapper[4888]: I1201 20:00:13.451783 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:00:13 crc kubenswrapper[4888]: E1201 20:00:13.452491 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:00:25 crc kubenswrapper[4888]: I1201 20:00:25.452122 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:00:25 crc kubenswrapper[4888]: E1201 20:00:25.452906 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.057524 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-8194-account-create-update-djj8n"] Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.074147 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-r72mz"] Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.084132 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-8194-account-create-update-djj8n"] Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.092595 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-r72mz"] Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.461531 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:00:40 crc kubenswrapper[4888]: E1201 20:00:40.461837 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.462661 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64d69349-ac8d-448b-af24-f47de7982fba" path="/var/lib/kubelet/pods/64d69349-ac8d-448b-af24-f47de7982fba/volumes" Dec 01 20:00:40 crc kubenswrapper[4888]: I1201 20:00:40.464334 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78e1c942-cc18-4fb3-a287-137fb7b4f309" path="/var/lib/kubelet/pods/78e1c942-cc18-4fb3-a287-137fb7b4f309/volumes" Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.076028 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-84cpp"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.096172 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-84cpp"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.109077 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-4fdb-account-create-update-7btll"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.117639 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-fvx9l"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.125838 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-4fdb-account-create-update-7btll"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.135047 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-fvx9l"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.144006 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-1d48-account-create-update-plfkf"] Dec 01 20:00:45 crc kubenswrapper[4888]: I1201 20:00:45.153102 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-1d48-account-create-update-plfkf"] Dec 01 20:00:46 crc kubenswrapper[4888]: I1201 20:00:46.462651 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5235c1b8-6bf6-485b-add3-05ef29c9178d" path="/var/lib/kubelet/pods/5235c1b8-6bf6-485b-add3-05ef29c9178d/volumes" Dec 01 20:00:46 crc kubenswrapper[4888]: I1201 20:00:46.463714 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8167f98f-356f-4fae-8945-96e7c0ab8c47" path="/var/lib/kubelet/pods/8167f98f-356f-4fae-8945-96e7c0ab8c47/volumes" Dec 01 20:00:46 crc kubenswrapper[4888]: I1201 20:00:46.465289 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f" path="/var/lib/kubelet/pods/84a8c1e9-3de0-4547-bbcb-5d4776b8ad2f/volumes" Dec 01 20:00:46 crc kubenswrapper[4888]: I1201 20:00:46.466842 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9267a3d-f044-4529-b9f7-d7de6088819e" path="/var/lib/kubelet/pods/e9267a3d-f044-4529-b9f7-d7de6088819e/volumes" Dec 01 20:00:55 crc kubenswrapper[4888]: I1201 20:00:55.451681 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:00:55 crc kubenswrapper[4888]: E1201 20:00:55.452783 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.155305 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29410321-t5grt"] Dec 01 20:01:00 crc kubenswrapper[4888]: E1201 20:01:00.156146 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="690f6f48-b606-4db9-b569-3bfbec27a013" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.156160 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="690f6f48-b606-4db9-b569-3bfbec27a013" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.156389 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="690f6f48-b606-4db9-b569-3bfbec27a013" containerName="collect-profiles" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.157001 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.177153 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410321-t5grt"] Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.321280 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.321348 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.321945 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9b2t\" (UniqueName: \"kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.322084 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.424661 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.424730 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.424780 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9b2t\" (UniqueName: \"kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.424815 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.431554 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.431842 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.434734 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.440976 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9b2t\" (UniqueName: \"kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t\") pod \"keystone-cron-29410321-t5grt\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.488958 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.922846 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29410321-t5grt"] Dec 01 20:01:00 crc kubenswrapper[4888]: I1201 20:01:00.984524 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-t5grt" event={"ID":"b059b2fe-58fd-46d6-8da6-ce215b31283a","Type":"ContainerStarted","Data":"eaf96286e6c8840e082f4c9f40c822b0d74b0aeaa8d946ca7499436b6c9c4396"} Dec 01 20:01:01 crc kubenswrapper[4888]: I1201 20:01:01.997098 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-t5grt" event={"ID":"b059b2fe-58fd-46d6-8da6-ce215b31283a","Type":"ContainerStarted","Data":"d76d1d26d018565ae97b50e6feaf6b1ba4cae77812fa2f068d0fcf55ec524187"} Dec 01 20:01:02 crc kubenswrapper[4888]: I1201 20:01:02.018315 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29410321-t5grt" podStartSLOduration=2.018295839 podStartE2EDuration="2.018295839s" podCreationTimestamp="2025-12-01 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:01:02.012567306 +0000 UTC m=+1661.883597220" watchObservedRunningTime="2025-12-01 20:01:02.018295839 +0000 UTC m=+1661.889325753" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.430051 4888 scope.go:117] "RemoveContainer" containerID="4247c72604821509a70c03a465aafd101626086f032db0b47585d5b81fb6ba06" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.455943 4888 scope.go:117] "RemoveContainer" containerID="dd64ea97811dc1e5b5fe3ad5721f00554f2380762733c3122d9094ea5d3b8714" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.526297 4888 scope.go:117] "RemoveContainer" containerID="35034bb595f0a43e09c5e0a2b582b0a9c18910ad541a890e07892b3ca8002a4f" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.556589 4888 scope.go:117] "RemoveContainer" containerID="a51f5095682b3c74f409fcc6a74e6849a8106fa25fbc5f67b3f302466c205227" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.598376 4888 scope.go:117] "RemoveContainer" containerID="78c64fb384f96a6de358875dec7c8ec4dee6d03474f40db69ed960bdeb0f2646" Dec 01 20:01:03 crc kubenswrapper[4888]: I1201 20:01:03.640128 4888 scope.go:117] "RemoveContainer" containerID="24fd61513984406e5592a0ee025a31418e4b688ea2962e302f55e76bfb5657b1" Dec 01 20:01:04 crc kubenswrapper[4888]: I1201 20:01:04.042265 4888 generic.go:334] "Generic (PLEG): container finished" podID="b059b2fe-58fd-46d6-8da6-ce215b31283a" containerID="d76d1d26d018565ae97b50e6feaf6b1ba4cae77812fa2f068d0fcf55ec524187" exitCode=0 Dec 01 20:01:04 crc kubenswrapper[4888]: I1201 20:01:04.042283 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-t5grt" event={"ID":"b059b2fe-58fd-46d6-8da6-ce215b31283a","Type":"ContainerDied","Data":"d76d1d26d018565ae97b50e6feaf6b1ba4cae77812fa2f068d0fcf55ec524187"} Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.399743 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.515815 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys\") pod \"b059b2fe-58fd-46d6-8da6-ce215b31283a\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.515872 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9b2t\" (UniqueName: \"kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t\") pod \"b059b2fe-58fd-46d6-8da6-ce215b31283a\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.516001 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data\") pod \"b059b2fe-58fd-46d6-8da6-ce215b31283a\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.516033 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle\") pod \"b059b2fe-58fd-46d6-8da6-ce215b31283a\" (UID: \"b059b2fe-58fd-46d6-8da6-ce215b31283a\") " Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.527869 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b059b2fe-58fd-46d6-8da6-ce215b31283a" (UID: "b059b2fe-58fd-46d6-8da6-ce215b31283a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.527882 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t" (OuterVolumeSpecName: "kube-api-access-s9b2t") pod "b059b2fe-58fd-46d6-8da6-ce215b31283a" (UID: "b059b2fe-58fd-46d6-8da6-ce215b31283a"). InnerVolumeSpecName "kube-api-access-s9b2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.545970 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b059b2fe-58fd-46d6-8da6-ce215b31283a" (UID: "b059b2fe-58fd-46d6-8da6-ce215b31283a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.568517 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data" (OuterVolumeSpecName: "config-data") pod "b059b2fe-58fd-46d6-8da6-ce215b31283a" (UID: "b059b2fe-58fd-46d6-8da6-ce215b31283a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.618267 4888 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.618314 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9b2t\" (UniqueName: \"kubernetes.io/projected/b059b2fe-58fd-46d6-8da6-ce215b31283a-kube-api-access-s9b2t\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.618328 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:05 crc kubenswrapper[4888]: I1201 20:01:05.618339 4888 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b059b2fe-58fd-46d6-8da6-ce215b31283a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:06 crc kubenswrapper[4888]: I1201 20:01:06.070993 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29410321-t5grt" event={"ID":"b059b2fe-58fd-46d6-8da6-ce215b31283a","Type":"ContainerDied","Data":"eaf96286e6c8840e082f4c9f40c822b0d74b0aeaa8d946ca7499436b6c9c4396"} Dec 01 20:01:06 crc kubenswrapper[4888]: I1201 20:01:06.071055 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eaf96286e6c8840e082f4c9f40c822b0d74b0aeaa8d946ca7499436b6c9c4396" Dec 01 20:01:06 crc kubenswrapper[4888]: I1201 20:01:06.071085 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29410321-t5grt" Dec 01 20:01:06 crc kubenswrapper[4888]: I1201 20:01:06.460946 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:01:06 crc kubenswrapper[4888]: E1201 20:01:06.461238 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:01:19 crc kubenswrapper[4888]: I1201 20:01:19.450998 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:01:19 crc kubenswrapper[4888]: E1201 20:01:19.452726 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.050090 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-mnpjt"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.060576 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-2dggz"] Dec 01 20:01:21 crc kubenswrapper[4888]: 
I1201 20:01:21.071592 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-jlw2t"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.079511 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-97e5-account-create-update-4qnck"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.087742 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-mnpjt"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.096347 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-cd73-account-create-update-qrpvz"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.104139 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-jlw2t"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.116033 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-97e5-account-create-update-4qnck"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.123778 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-e18c-account-create-update-pvcwx"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.132023 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-cd73-account-create-update-qrpvz"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.139573 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-e18c-account-create-update-pvcwx"] Dec 01 20:01:21 crc kubenswrapper[4888]: I1201 20:01:21.149403 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-2dggz"] Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.465925 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a350005-42be-4ad0-8996-3ac3a5808a79" path="/var/lib/kubelet/pods/1a350005-42be-4ad0-8996-3ac3a5808a79/volumes" Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.467362 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37be0244-881b-469e-9dd3-5f9d5b38f042" path="/var/lib/kubelet/pods/37be0244-881b-469e-9dd3-5f9d5b38f042/volumes" Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.468533 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57fd71b5-5871-4427-9c7f-3c0c7b9fa47a" path="/var/lib/kubelet/pods/57fd71b5-5871-4427-9c7f-3c0c7b9fa47a/volumes" Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.469485 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67be08f6-80ec-4816-8670-40bd598ac820" path="/var/lib/kubelet/pods/67be08f6-80ec-4816-8670-40bd598ac820/volumes" Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.470889 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a21e9bd-e339-4d46-aaed-1ed1d4cfe933" path="/var/lib/kubelet/pods/7a21e9bd-e339-4d46-aaed-1ed1d4cfe933/volumes" Dec 01 20:01:22 crc kubenswrapper[4888]: I1201 20:01:22.472124 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f794204-9db5-498d-b8a5-586ec3b9f921" path="/var/lib/kubelet/pods/7f794204-9db5-498d-b8a5-586ec3b9f921/volumes" Dec 01 20:01:30 crc kubenswrapper[4888]: I1201 20:01:30.031708 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-vcl88"] Dec 01 20:01:30 crc kubenswrapper[4888]: I1201 20:01:30.042115 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-vcl88"] Dec 01 20:01:30 crc kubenswrapper[4888]: I1201 20:01:30.465382 4888 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="b85ed1be-1f93-4247-ae07-5c08ecbb6802" path="/var/lib/kubelet/pods/b85ed1be-1f93-4247-ae07-5c08ecbb6802/volumes" Dec 01 20:01:31 crc kubenswrapper[4888]: I1201 20:01:31.029579 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-2l5hv"] Dec 01 20:01:31 crc kubenswrapper[4888]: I1201 20:01:31.039010 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-2l5hv"] Dec 01 20:01:31 crc kubenswrapper[4888]: I1201 20:01:31.452511 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:01:31 crc kubenswrapper[4888]: E1201 20:01:31.453137 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:01:32 crc kubenswrapper[4888]: I1201 20:01:32.469211 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="843bf3c4-e095-466a-b4f3-5f48b85dc179" path="/var/lib/kubelet/pods/843bf3c4-e095-466a-b4f3-5f48b85dc179/volumes" Dec 01 20:01:34 crc kubenswrapper[4888]: I1201 20:01:34.306455 4888 generic.go:334] "Generic (PLEG): container finished" podID="04974f6b-2545-433f-907d-5f97024057d4" containerID="7d60fc6a06098d78d5924dc37653bbccdea544b67ee9ac9ee8f563d5ec0b3d2b" exitCode=0 Dec 01 20:01:34 crc kubenswrapper[4888]: I1201 20:01:34.306761 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" event={"ID":"04974f6b-2545-433f-907d-5f97024057d4","Type":"ContainerDied","Data":"7d60fc6a06098d78d5924dc37653bbccdea544b67ee9ac9ee8f563d5ec0b3d2b"} Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.720295 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.882943 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key\") pod \"04974f6b-2545-433f-907d-5f97024057d4\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.883275 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle\") pod \"04974f6b-2545-433f-907d-5f97024057d4\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.883343 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8qlx\" (UniqueName: \"kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx\") pod \"04974f6b-2545-433f-907d-5f97024057d4\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.883399 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory\") pod \"04974f6b-2545-433f-907d-5f97024057d4\" (UID: \"04974f6b-2545-433f-907d-5f97024057d4\") " Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.897432 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "04974f6b-2545-433f-907d-5f97024057d4" (UID: "04974f6b-2545-433f-907d-5f97024057d4"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.905479 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx" (OuterVolumeSpecName: "kube-api-access-n8qlx") pod "04974f6b-2545-433f-907d-5f97024057d4" (UID: "04974f6b-2545-433f-907d-5f97024057d4"). InnerVolumeSpecName "kube-api-access-n8qlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.949214 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory" (OuterVolumeSpecName: "inventory") pod "04974f6b-2545-433f-907d-5f97024057d4" (UID: "04974f6b-2545-433f-907d-5f97024057d4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.973858 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "04974f6b-2545-433f-907d-5f97024057d4" (UID: "04974f6b-2545-433f-907d-5f97024057d4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.985546 4888 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.985589 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8qlx\" (UniqueName: \"kubernetes.io/projected/04974f6b-2545-433f-907d-5f97024057d4-kube-api-access-n8qlx\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.985599 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:35 crc kubenswrapper[4888]: I1201 20:01:35.985607 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/04974f6b-2545-433f-907d-5f97024057d4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.326878 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" event={"ID":"04974f6b-2545-433f-907d-5f97024057d4","Type":"ContainerDied","Data":"9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b"} Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.327513 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9310e2d791cd3a2e700d1b750b27021218ee87bfb0f59433f25202fcbd91617b" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.327147 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.398128 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb"] Dec 01 20:01:36 crc kubenswrapper[4888]: E1201 20:01:36.398665 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04974f6b-2545-433f-907d-5f97024057d4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.398692 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="04974f6b-2545-433f-907d-5f97024057d4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:01:36 crc kubenswrapper[4888]: E1201 20:01:36.398714 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b059b2fe-58fd-46d6-8da6-ce215b31283a" containerName="keystone-cron" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.398722 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b059b2fe-58fd-46d6-8da6-ce215b31283a" containerName="keystone-cron" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.398975 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b059b2fe-58fd-46d6-8da6-ce215b31283a" containerName="keystone-cron" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.399006 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="04974f6b-2545-433f-907d-5f97024057d4" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.399798 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.409739 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.409829 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.409733 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.410381 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.420640 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb"] Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.495584 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.495975 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.496256 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.598335 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.598521 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.598676 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.602434 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.605052 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.615556 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lknxb\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:36 crc kubenswrapper[4888]: I1201 20:01:36.764717 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:01:37 crc kubenswrapper[4888]: I1201 20:01:37.253106 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:01:37 crc kubenswrapper[4888]: I1201 20:01:37.256633 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb"] Dec 01 20:01:37 crc kubenswrapper[4888]: I1201 20:01:37.337844 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" event={"ID":"f547ee6c-51cc-47cb-b6c8-2df4311039b2","Type":"ContainerStarted","Data":"cae26cd2c02aef4ef9bba81449fbfa97d8cedce4d05f7370ed9b867f13656d37"} Dec 01 20:01:38 crc kubenswrapper[4888]: I1201 20:01:38.347291 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" event={"ID":"f547ee6c-51cc-47cb-b6c8-2df4311039b2","Type":"ContainerStarted","Data":"ea53f4e870cf7e5bb97c4eac3175138fdc1c640f10cff7ea88df84cd3164879a"} Dec 01 20:01:38 crc kubenswrapper[4888]: I1201 20:01:38.377268 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" podStartSLOduration=1.753510403 podStartE2EDuration="2.377247622s" podCreationTimestamp="2025-12-01 20:01:36 +0000 UTC" firstStartedPulling="2025-12-01 20:01:37.252892556 +0000 UTC m=+1697.123922480" lastFinishedPulling="2025-12-01 20:01:37.876629785 +0000 UTC m=+1697.747659699" observedRunningTime="2025-12-01 20:01:38.361095802 +0000 UTC m=+1698.232125716" watchObservedRunningTime="2025-12-01 20:01:38.377247622 +0000 UTC m=+1698.248277536" Dec 01 20:01:43 crc kubenswrapper[4888]: I1201 20:01:43.451148 4888 scope.go:117] "RemoveContainer" 
containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:01:43 crc kubenswrapper[4888]: E1201 20:01:43.451934 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:01:57 crc kubenswrapper[4888]: I1201 20:01:57.452595 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:01:57 crc kubenswrapper[4888]: E1201 20:01:57.453904 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.054104 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-m7g88"] Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.064940 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-m7g88"] Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.793551 4888 scope.go:117] "RemoveContainer" containerID="e5a864fdabd4c395f3d4230c08f3326ac1f41f695be82a7f9ecbc2d15ca4aea1" Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.817946 4888 scope.go:117] "RemoveContainer" containerID="707dce5fdcba6fbb943150e7273574bbd155231f2307395a44e8268309d81a79" Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.887434 4888 scope.go:117] "RemoveContainer" containerID="67dc3451df3b27852e504d44702309c8e4676e6f7b8ad029fa8d57c8c216ae88" Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.923834 4888 scope.go:117] "RemoveContainer" containerID="381b9f5fcda2723e8eadc8faa24fe3a22785c1e9654d452b34e61c8817b8deda" Dec 01 20:02:03 crc kubenswrapper[4888]: I1201 20:02:03.957227 4888 scope.go:117] "RemoveContainer" containerID="ac2c12e010d8904e43ed20af8c8b8879f366820cb82bb208beb41726d26bdd66" Dec 01 20:02:04 crc kubenswrapper[4888]: I1201 20:02:04.008279 4888 scope.go:117] "RemoveContainer" containerID="accb72ab357e4e7848bb0fcfae7958c63777bce22ec2d68469d45283c4c17a5d" Dec 01 20:02:04 crc kubenswrapper[4888]: I1201 20:02:04.075536 4888 scope.go:117] "RemoveContainer" containerID="61d5e9e2419743db0065411b117b4b013b8e88201d7f82768646ee2c97dc6fb6" Dec 01 20:02:04 crc kubenswrapper[4888]: I1201 20:02:04.094828 4888 scope.go:117] "RemoveContainer" containerID="66e3f4fbc0dd1e564ab2c0d652366503e8cf58d4b765568a9ba240822941543a" Dec 01 20:02:04 crc kubenswrapper[4888]: I1201 20:02:04.472255 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b19201ab-fb2d-4011-aa2f-a078153687d1" path="/var/lib/kubelet/pods/b19201ab-fb2d-4011-aa2f-a078153687d1/volumes" Dec 01 20:02:09 crc kubenswrapper[4888]: I1201 20:02:09.031334 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-8dw7s"] Dec 01 20:02:09 crc kubenswrapper[4888]: I1201 20:02:09.042885 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/placement-db-sync-8dw7s"] Dec 01 20:02:09 crc kubenswrapper[4888]: I1201 20:02:09.451749 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:02:09 crc kubenswrapper[4888]: E1201 20:02:09.452306 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:02:10 crc kubenswrapper[4888]: I1201 20:02:10.462420 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4b06642-351e-4bc5-b48e-ab8b6ddf750c" path="/var/lib/kubelet/pods/c4b06642-351e-4bc5-b48e-ab8b6ddf750c/volumes" Dec 01 20:02:16 crc kubenswrapper[4888]: I1201 20:02:16.046464 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-54q8p"] Dec 01 20:02:16 crc kubenswrapper[4888]: I1201 20:02:16.055850 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-54q8p"] Dec 01 20:02:16 crc kubenswrapper[4888]: I1201 20:02:16.462747 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9337539-212d-4ad9-9572-80712d40784d" path="/var/lib/kubelet/pods/a9337539-212d-4ad9-9572-80712d40784d/volumes" Dec 01 20:02:22 crc kubenswrapper[4888]: I1201 20:02:22.451834 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:02:22 crc kubenswrapper[4888]: E1201 20:02:22.453222 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:02:30 crc kubenswrapper[4888]: I1201 20:02:30.033629 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-xk69h"] Dec 01 20:02:30 crc kubenswrapper[4888]: I1201 20:02:30.051747 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-xk69h"] Dec 01 20:02:30 crc kubenswrapper[4888]: I1201 20:02:30.464746 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="047835ab-4a66-4ff8-9252-c9c5ca0d0352" path="/var/lib/kubelet/pods/047835ab-4a66-4ff8-9252-c9c5ca0d0352/volumes" Dec 01 20:02:32 crc kubenswrapper[4888]: I1201 20:02:32.032465 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-tnc9l"] Dec 01 20:02:32 crc kubenswrapper[4888]: I1201 20:02:32.043626 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-tnc9l"] Dec 01 20:02:32 crc kubenswrapper[4888]: I1201 20:02:32.461424 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e867ec9b-1972-4745-8dea-944cc62c6db5" path="/var/lib/kubelet/pods/e867ec9b-1972-4745-8dea-944cc62c6db5/volumes" Dec 01 20:02:34 crc kubenswrapper[4888]: I1201 20:02:34.451630 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:02:34 crc kubenswrapper[4888]: 
E1201 20:02:34.453311 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:02:45 crc kubenswrapper[4888]: I1201 20:02:45.451577 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:02:45 crc kubenswrapper[4888]: E1201 20:02:45.452412 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:02:56 crc kubenswrapper[4888]: I1201 20:02:56.451565 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:02:56 crc kubenswrapper[4888]: E1201 20:02:56.452515 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:03:04 crc kubenswrapper[4888]: I1201 20:03:04.264035 4888 scope.go:117] "RemoveContainer" containerID="21d149c86b7473dc1c67882266bd7044406705d15a6287d62e5733d7955d4b5f" Dec 01 20:03:04 crc kubenswrapper[4888]: I1201 20:03:04.319509 4888 scope.go:117] "RemoveContainer" containerID="b81cebe4ed41c65090024b191d59b7d1e00a2e5623fc1fbad0d03089f71776bf" Dec 01 20:03:04 crc kubenswrapper[4888]: I1201 20:03:04.383337 4888 scope.go:117] "RemoveContainer" containerID="a47bfb9686a24a8042cb92fc6e06f13dc106b026bf9c2dbc88683399fe34a64b" Dec 01 20:03:04 crc kubenswrapper[4888]: I1201 20:03:04.413634 4888 scope.go:117] "RemoveContainer" containerID="35e2de823012c9ae66ff9e1704d6fa1805e591772a3eeef43db4db76756c9c54" Dec 01 20:03:04 crc kubenswrapper[4888]: I1201 20:03:04.456090 4888 scope.go:117] "RemoveContainer" containerID="503c0e5e504a0a5afdd90463196b3c8820f4b717bc53ecb3ecebc6692ecf33bd" Dec 01 20:03:10 crc kubenswrapper[4888]: I1201 20:03:10.460897 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:03:10 crc kubenswrapper[4888]: E1201 20:03:10.462366 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:03:21 crc kubenswrapper[4888]: I1201 20:03:21.451460 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 
20:03:22 crc kubenswrapper[4888]: I1201 20:03:22.394502 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c"} Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.050774 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-9cnzd"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.062344 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9ee4-account-create-update-5rm9g"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.075553 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0ad5-account-create-update-jmqp8"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.084855 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9be2-account-create-update-4dzgs"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.095470 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-q4zxq"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.102925 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-9cnzd"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.110680 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9be2-account-create-update-4dzgs"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.117842 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0ad5-account-create-update-jmqp8"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.125654 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-9ee4-account-create-update-5rm9g"] Dec 01 20:03:23 crc kubenswrapper[4888]: I1201 20:03:23.133845 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-q4zxq"] Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.029520 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-nwdjg"] Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.038594 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-nwdjg"] Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.462675 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5908ab4f-64fe-466c-b085-0c70ca92a868" path="/var/lib/kubelet/pods/5908ab4f-64fe-466c-b085-0c70ca92a868/volumes" Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.463301 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f05189a-2b40-4e96-bc85-1b23401fc9d9" path="/var/lib/kubelet/pods/5f05189a-2b40-4e96-bc85-1b23401fc9d9/volumes" Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.463850 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7528663a-6635-4fe0-8840-d0d0601799ce" path="/var/lib/kubelet/pods/7528663a-6635-4fe0-8840-d0d0601799ce/volumes" Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.464491 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91704c86-8e91-4bf0-8ee0-def68c8c321a" path="/var/lib/kubelet/pods/91704c86-8e91-4bf0-8ee0-def68c8c321a/volumes" Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.465688 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d072d085-14a2-4137-a9a1-29882ab4fe55" 
path="/var/lib/kubelet/pods/d072d085-14a2-4137-a9a1-29882ab4fe55/volumes" Dec 01 20:03:24 crc kubenswrapper[4888]: I1201 20:03:24.466271 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e53eb8db-1d23-4aac-85d2-36f1008834bb" path="/var/lib/kubelet/pods/e53eb8db-1d23-4aac-85d2-36f1008834bb/volumes" Dec 01 20:03:49 crc kubenswrapper[4888]: I1201 20:03:49.049111 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vmhn6"] Dec 01 20:03:49 crc kubenswrapper[4888]: I1201 20:03:49.062021 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vmhn6"] Dec 01 20:03:50 crc kubenswrapper[4888]: I1201 20:03:50.465813 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c3317b0-1533-44ad-81c0-e0b0b150fa91" path="/var/lib/kubelet/pods/2c3317b0-1533-44ad-81c0-e0b0b150fa91/volumes" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.596482 4888 scope.go:117] "RemoveContainer" containerID="c5c4222a640131fbbbb14d210300042be26d7b4c86b178394d6c3de5dc81f73b" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.620912 4888 scope.go:117] "RemoveContainer" containerID="5688aa7a2c95c898b381703f839ca94238e217d5a8e7f1f66c15de4c4436e353" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.671574 4888 scope.go:117] "RemoveContainer" containerID="eb04160056694df50ee360c0cfa6f5f561a83bd3c2dc58e6632472dc910706d8" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.714002 4888 scope.go:117] "RemoveContainer" containerID="d789933eb23391062144ecccdd3b256dd69de3c76157aab36af8cde91f873f9e" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.756599 4888 scope.go:117] "RemoveContainer" containerID="dac2766fc9b1fc690e4c31dc4a3ce48f1727e9eccfe592530996b697542f8947" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.805979 4888 scope.go:117] "RemoveContainer" containerID="b78445b46a25327bd95d6b08a26dbacdcd0a4edc56487cbe9ec598f9b6d6e5f3" Dec 01 20:04:04 crc kubenswrapper[4888]: I1201 20:04:04.848024 4888 scope.go:117] "RemoveContainer" containerID="78afded45ea42ff0e084d6d38f874cd034a8d8fea965f75db8ff1068fd99d038" Dec 01 20:04:07 crc kubenswrapper[4888]: I1201 20:04:07.030263 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-q4d97"] Dec 01 20:04:07 crc kubenswrapper[4888]: I1201 20:04:07.059219 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-q4d97"] Dec 01 20:04:08 crc kubenswrapper[4888]: I1201 20:04:08.034291 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-659wd"] Dec 01 20:04:08 crc kubenswrapper[4888]: I1201 20:04:08.049018 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-659wd"] Dec 01 20:04:08 crc kubenswrapper[4888]: I1201 20:04:08.463322 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e1aa47b-6022-4533-91e7-e6108f9e7b63" path="/var/lib/kubelet/pods/2e1aa47b-6022-4533-91e7-e6108f9e7b63/volumes" Dec 01 20:04:08 crc kubenswrapper[4888]: I1201 20:04:08.463907 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5009fe9-671f-4c13-9c74-45d61ab93ca0" path="/var/lib/kubelet/pods/c5009fe9-671f-4c13-9c74-45d61ab93ca0/volumes" Dec 01 20:04:15 crc kubenswrapper[4888]: I1201 20:04:15.851393 4888 generic.go:334] "Generic (PLEG): container finished" podID="f547ee6c-51cc-47cb-b6c8-2df4311039b2" 
containerID="ea53f4e870cf7e5bb97c4eac3175138fdc1c640f10cff7ea88df84cd3164879a" exitCode=0 Dec 01 20:04:15 crc kubenswrapper[4888]: I1201 20:04:15.851545 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" event={"ID":"f547ee6c-51cc-47cb-b6c8-2df4311039b2","Type":"ContainerDied","Data":"ea53f4e870cf7e5bb97c4eac3175138fdc1c640f10cff7ea88df84cd3164879a"} Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.355260 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.547428 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj\") pod \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.547611 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key\") pod \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.547642 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory\") pod \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\" (UID: \"f547ee6c-51cc-47cb-b6c8-2df4311039b2\") " Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.555585 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj" (OuterVolumeSpecName: "kube-api-access-9jsjj") pod "f547ee6c-51cc-47cb-b6c8-2df4311039b2" (UID: "f547ee6c-51cc-47cb-b6c8-2df4311039b2"). InnerVolumeSpecName "kube-api-access-9jsjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.576647 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f547ee6c-51cc-47cb-b6c8-2df4311039b2" (UID: "f547ee6c-51cc-47cb-b6c8-2df4311039b2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.577152 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory" (OuterVolumeSpecName: "inventory") pod "f547ee6c-51cc-47cb-b6c8-2df4311039b2" (UID: "f547ee6c-51cc-47cb-b6c8-2df4311039b2"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.649756 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jsjj\" (UniqueName: \"kubernetes.io/projected/f547ee6c-51cc-47cb-b6c8-2df4311039b2-kube-api-access-9jsjj\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.649787 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.649796 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f547ee6c-51cc-47cb-b6c8-2df4311039b2-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.874388 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" event={"ID":"f547ee6c-51cc-47cb-b6c8-2df4311039b2","Type":"ContainerDied","Data":"cae26cd2c02aef4ef9bba81449fbfa97d8cedce4d05f7370ed9b867f13656d37"} Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.874669 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cae26cd2c02aef4ef9bba81449fbfa97d8cedce4d05f7370ed9b867f13656d37" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.874505 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lknxb" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.962018 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd"] Dec 01 20:04:17 crc kubenswrapper[4888]: E1201 20:04:17.963410 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f547ee6c-51cc-47cb-b6c8-2df4311039b2" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.963441 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f547ee6c-51cc-47cb-b6c8-2df4311039b2" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.963676 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f547ee6c-51cc-47cb-b6c8-2df4311039b2" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.964529 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:17 crc kubenswrapper[4888]: I1201 20:04:17.972298 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd"] Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.016915 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.017544 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.017765 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.018055 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.159825 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.160856 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.160972 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbhhl\" (UniqueName: \"kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.262333 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbhhl\" (UniqueName: \"kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.262418 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.262513 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.267385 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.267776 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.280686 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbhhl\" (UniqueName: \"kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.335327 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.852697 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd"] Dec 01 20:04:18 crc kubenswrapper[4888]: I1201 20:04:18.885874 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" event={"ID":"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c","Type":"ContainerStarted","Data":"9195fd4fa07fe410eb80534fd2a9245e3c1eb99fc5a387ccc9934e0198f5241f"} Dec 01 20:04:19 crc kubenswrapper[4888]: I1201 20:04:19.897049 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" event={"ID":"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c","Type":"ContainerStarted","Data":"7a7b6fe4d33f100c733897e87c4bd1079184da68bc3edab5dffb870975b8f622"} Dec 01 20:04:19 crc kubenswrapper[4888]: I1201 20:04:19.920547 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" podStartSLOduration=2.2911867839999998 podStartE2EDuration="2.92052745s" podCreationTimestamp="2025-12-01 20:04:17 +0000 UTC" firstStartedPulling="2025-12-01 20:04:18.854518359 +0000 UTC m=+1858.725548273" lastFinishedPulling="2025-12-01 20:04:19.483859025 +0000 UTC m=+1859.354888939" observedRunningTime="2025-12-01 20:04:19.915394456 +0000 UTC m=+1859.786424380" watchObservedRunningTime="2025-12-01 20:04:19.92052745 +0000 UTC m=+1859.791557364" Dec 01 20:04:52 crc kubenswrapper[4888]: I1201 20:04:52.038753 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-l77vw"] Dec 01 20:04:52 crc kubenswrapper[4888]: I1201 20:04:52.046791 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-cell-mapping-l77vw"] Dec 01 20:04:52 crc kubenswrapper[4888]: I1201 20:04:52.462116 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea443d4-4a6d-47df-9839-7108ffb3d4bc" path="/var/lib/kubelet/pods/eea443d4-4a6d-47df-9839-7108ffb3d4bc/volumes" Dec 01 20:05:05 crc kubenswrapper[4888]: I1201 20:05:05.000492 4888 scope.go:117] "RemoveContainer" containerID="d1a793ae03c91d8ddda052feef2f2aa551fc8bf149f4fc0f8f4888f06fea49fb" Dec 01 20:05:05 crc kubenswrapper[4888]: I1201 20:05:05.068880 4888 scope.go:117] "RemoveContainer" containerID="68ff2eb8ba45911c903718565d6de350e4e1f06b3cd5124cb7196b39a5e528e3" Dec 01 20:05:05 crc kubenswrapper[4888]: I1201 20:05:05.129157 4888 scope.go:117] "RemoveContainer" containerID="c213a0eb8684dcc9949d8b22ede26cfa17d51537ec00947b5b4eeaa54a5d484f" Dec 01 20:05:49 crc kubenswrapper[4888]: I1201 20:05:49.643349 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" containerID="7a7b6fe4d33f100c733897e87c4bd1079184da68bc3edab5dffb870975b8f622" exitCode=0 Dec 01 20:05:49 crc kubenswrapper[4888]: I1201 20:05:49.643518 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" event={"ID":"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c","Type":"ContainerDied","Data":"7a7b6fe4d33f100c733897e87c4bd1079184da68bc3edab5dffb870975b8f622"} Dec 01 20:05:50 crc kubenswrapper[4888]: I1201 20:05:50.038045 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:05:50 crc kubenswrapper[4888]: I1201 20:05:50.038681 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.231475 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.399931 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory\") pod \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.399979 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key\") pod \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.400021 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbhhl\" (UniqueName: \"kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl\") pod \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\" (UID: \"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c\") " Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.405816 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl" (OuterVolumeSpecName: "kube-api-access-bbhhl") pod "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" (UID: "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c"). InnerVolumeSpecName "kube-api-access-bbhhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.433155 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory" (OuterVolumeSpecName: "inventory") pod "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" (UID: "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.433178 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" (UID: "fc27098c-7ab4-4b1d-b5e2-2784d655cd9c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.501774 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.501807 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.501817 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbhhl\" (UniqueName: \"kubernetes.io/projected/fc27098c-7ab4-4b1d-b5e2-2784d655cd9c-kube-api-access-bbhhl\") on node \"crc\" DevicePath \"\"" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.663212 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" event={"ID":"fc27098c-7ab4-4b1d-b5e2-2784d655cd9c","Type":"ContainerDied","Data":"9195fd4fa07fe410eb80534fd2a9245e3c1eb99fc5a387ccc9934e0198f5241f"} Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.663247 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9195fd4fa07fe410eb80534fd2a9245e3c1eb99fc5a387ccc9934e0198f5241f" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.663267 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.759768 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4"] Dec 01 20:05:51 crc kubenswrapper[4888]: E1201 20:05:51.760310 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.760327 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.760501 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc27098c-7ab4-4b1d-b5e2-2784d655cd9c" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.761204 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.763326 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.763603 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.766804 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.766854 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.771522 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4"] Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.909253 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9rcc\" (UniqueName: \"kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.909627 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:51 crc kubenswrapper[4888]: I1201 20:05:51.909813 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.011213 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.011321 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9rcc\" (UniqueName: \"kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.011366 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.015120 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.018390 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.028311 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9rcc\" (UniqueName: \"kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.077704 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.622498 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4"] Dec 01 20:05:52 crc kubenswrapper[4888]: I1201 20:05:52.672061 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" event={"ID":"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e","Type":"ContainerStarted","Data":"512dcff03f74a7ec209e338c144c307f20558119c10aeb400eb58c4ca1d2d6b1"} Dec 01 20:05:53 crc kubenswrapper[4888]: I1201 20:05:53.681012 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" event={"ID":"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e","Type":"ContainerStarted","Data":"6dd4ab291d00f41f1593ea96024482d375de3e0126d9cf8a768c8bb62bec2330"} Dec 01 20:05:53 crc kubenswrapper[4888]: I1201 20:05:53.698990 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" podStartSLOduration=2.17854841 podStartE2EDuration="2.698970737s" podCreationTimestamp="2025-12-01 20:05:51 +0000 UTC" firstStartedPulling="2025-12-01 20:05:52.631169768 +0000 UTC m=+1952.502199682" lastFinishedPulling="2025-12-01 20:05:53.151592095 +0000 UTC m=+1953.022622009" observedRunningTime="2025-12-01 20:05:53.693517443 +0000 UTC m=+1953.564547367" watchObservedRunningTime="2025-12-01 20:05:53.698970737 +0000 UTC m=+1953.570000661" Dec 01 20:05:58 crc kubenswrapper[4888]: I1201 20:05:58.725789 4888 generic.go:334] "Generic (PLEG): container finished" podID="3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" containerID="6dd4ab291d00f41f1593ea96024482d375de3e0126d9cf8a768c8bb62bec2330" exitCode=0 Dec 01 20:05:58 crc kubenswrapper[4888]: I1201 
20:05:58.726346 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" event={"ID":"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e","Type":"ContainerDied","Data":"6dd4ab291d00f41f1593ea96024482d375de3e0126d9cf8a768c8bb62bec2330"} Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.112821 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.275789 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key\") pod \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.276178 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory\") pod \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.276502 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9rcc\" (UniqueName: \"kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc\") pod \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\" (UID: \"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e\") " Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.289558 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc" (OuterVolumeSpecName: "kube-api-access-c9rcc") pod "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" (UID: "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e"). InnerVolumeSpecName "kube-api-access-c9rcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.305678 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" (UID: "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.306258 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory" (OuterVolumeSpecName: "inventory") pod "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" (UID: "3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.378768 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9rcc\" (UniqueName: \"kubernetes.io/projected/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-kube-api-access-c9rcc\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.378813 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.378824 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.742428 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" event={"ID":"3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e","Type":"ContainerDied","Data":"512dcff03f74a7ec209e338c144c307f20558119c10aeb400eb58c4ca1d2d6b1"} Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.742466 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="512dcff03f74a7ec209e338c144c307f20558119c10aeb400eb58c4ca1d2d6b1" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.742492 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.824470 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2"] Dec 01 20:06:00 crc kubenswrapper[4888]: E1201 20:06:00.824862 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.824878 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.825068 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.826122 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.830848 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.830924 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.830970 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.831364 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.839332 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2"] Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.995392 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.995525 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:00 crc kubenswrapper[4888]: I1201 20:06:00.995560 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlk28\" (UniqueName: \"kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.097205 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.097338 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.097372 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlk28\" (UniqueName: \"kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: 
\"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.102538 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.107772 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.116613 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlk28\" (UniqueName: \"kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-pfrz2\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.202790 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.690129 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2"] Dec 01 20:06:01 crc kubenswrapper[4888]: I1201 20:06:01.754689 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" event={"ID":"61d5991b-f680-443e-8562-d4e755429abe","Type":"ContainerStarted","Data":"9ed145f2776b32b91f880acaeaf4776749817525f894326c7c05d51b58399103"} Dec 01 20:06:03 crc kubenswrapper[4888]: I1201 20:06:03.787514 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" event={"ID":"61d5991b-f680-443e-8562-d4e755429abe","Type":"ContainerStarted","Data":"f90c19d6ad499c4acd8881b683475a7e29365f1cfd1eb9b65dae330a5d9c3897"} Dec 01 20:06:03 crc kubenswrapper[4888]: I1201 20:06:03.809862 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" podStartSLOduration=2.847073246 podStartE2EDuration="3.809841103s" podCreationTimestamp="2025-12-01 20:06:00 +0000 UTC" firstStartedPulling="2025-12-01 20:06:01.703216469 +0000 UTC m=+1961.574246383" lastFinishedPulling="2025-12-01 20:06:02.665984326 +0000 UTC m=+1962.537014240" observedRunningTime="2025-12-01 20:06:03.803524944 +0000 UTC m=+1963.674554868" watchObservedRunningTime="2025-12-01 20:06:03.809841103 +0000 UTC m=+1963.680871017" Dec 01 20:06:20 crc kubenswrapper[4888]: I1201 20:06:20.037852 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:06:20 crc kubenswrapper[4888]: I1201 20:06:20.038428 4888 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:06:43 crc kubenswrapper[4888]: I1201 20:06:43.122854 4888 generic.go:334] "Generic (PLEG): container finished" podID="61d5991b-f680-443e-8562-d4e755429abe" containerID="f90c19d6ad499c4acd8881b683475a7e29365f1cfd1eb9b65dae330a5d9c3897" exitCode=0 Dec 01 20:06:43 crc kubenswrapper[4888]: I1201 20:06:43.122962 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" event={"ID":"61d5991b-f680-443e-8562-d4e755429abe","Type":"ContainerDied","Data":"f90c19d6ad499c4acd8881b683475a7e29365f1cfd1eb9b65dae330a5d9c3897"} Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.562095 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.623852 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlk28\" (UniqueName: \"kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28\") pod \"61d5991b-f680-443e-8562-d4e755429abe\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.624044 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key\") pod \"61d5991b-f680-443e-8562-d4e755429abe\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.624120 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory\") pod \"61d5991b-f680-443e-8562-d4e755429abe\" (UID: \"61d5991b-f680-443e-8562-d4e755429abe\") " Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.630399 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28" (OuterVolumeSpecName: "kube-api-access-rlk28") pod "61d5991b-f680-443e-8562-d4e755429abe" (UID: "61d5991b-f680-443e-8562-d4e755429abe"). InnerVolumeSpecName "kube-api-access-rlk28". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.652851 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory" (OuterVolumeSpecName: "inventory") pod "61d5991b-f680-443e-8562-d4e755429abe" (UID: "61d5991b-f680-443e-8562-d4e755429abe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.662013 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61d5991b-f680-443e-8562-d4e755429abe" (UID: "61d5991b-f680-443e-8562-d4e755429abe"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.726194 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlk28\" (UniqueName: \"kubernetes.io/projected/61d5991b-f680-443e-8562-d4e755429abe-kube-api-access-rlk28\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.726239 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:44 crc kubenswrapper[4888]: I1201 20:06:44.726249 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d5991b-f680-443e-8562-d4e755429abe-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.139215 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" event={"ID":"61d5991b-f680-443e-8562-d4e755429abe","Type":"ContainerDied","Data":"9ed145f2776b32b91f880acaeaf4776749817525f894326c7c05d51b58399103"} Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.139255 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ed145f2776b32b91f880acaeaf4776749817525f894326c7c05d51b58399103" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.139261 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-pfrz2" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.244581 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk"] Dec 01 20:06:45 crc kubenswrapper[4888]: E1201 20:06:45.245404 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d5991b-f680-443e-8562-d4e755429abe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.245427 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d5991b-f680-443e-8562-d4e755429abe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.245689 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d5991b-f680-443e-8562-d4e755429abe" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.246517 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.249078 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.249205 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.249772 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.249839 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.252391 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk"] Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.335813 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w956t\" (UniqueName: \"kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.335854 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.336146 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.438176 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.438279 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w956t\" (UniqueName: \"kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.438404 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" 
(UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.442491 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.443352 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.454584 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w956t\" (UniqueName: \"kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:45 crc kubenswrapper[4888]: I1201 20:06:45.600710 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:06:46 crc kubenswrapper[4888]: I1201 20:06:46.141405 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk"] Dec 01 20:06:46 crc kubenswrapper[4888]: I1201 20:06:46.154181 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:06:47 crc kubenswrapper[4888]: I1201 20:06:47.166002 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" event={"ID":"c80b7e5e-b12e-49c1-8379-a7e33ad355fb","Type":"ContainerStarted","Data":"00c7a7b78e7b0b3f3b88dcfcc7ba87a2a687cf555b6e59c197954a66652762fc"} Dec 01 20:06:47 crc kubenswrapper[4888]: I1201 20:06:47.166308 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" event={"ID":"c80b7e5e-b12e-49c1-8379-a7e33ad355fb","Type":"ContainerStarted","Data":"f99d32363d5cbc6f46c43da35c2ceb756cff2488e39d59134428e416291b5121"} Dec 01 20:06:47 crc kubenswrapper[4888]: I1201 20:06:47.190499 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" podStartSLOduration=1.7363894709999999 podStartE2EDuration="2.190477733s" podCreationTimestamp="2025-12-01 20:06:45 +0000 UTC" firstStartedPulling="2025-12-01 20:06:46.153925232 +0000 UTC m=+2006.024955146" lastFinishedPulling="2025-12-01 20:06:46.608013484 +0000 UTC m=+2006.479043408" observedRunningTime="2025-12-01 20:06:47.184485452 +0000 UTC m=+2007.055515366" watchObservedRunningTime="2025-12-01 20:06:47.190477733 +0000 UTC m=+2007.061507647" Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.038362 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.039144 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.039215 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.040054 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.040117 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c" gracePeriod=600 Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.199805 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c" exitCode=0 Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.199850 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c"} Dec 01 20:06:50 crc kubenswrapper[4888]: I1201 20:06:50.199895 4888 scope.go:117] "RemoveContainer" containerID="b066b9273a736b7bfb1d3fe81f45ac628337e9b19cef1f00a13b896ec74d1ebb" Dec 01 20:06:51 crc kubenswrapper[4888]: I1201 20:06:51.210502 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3"} Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.435052 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.438430 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.455991 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.519135 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.519431 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8knkg\" (UniqueName: \"kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.519504 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.621457 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8knkg\" (UniqueName: \"kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.621531 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.621595 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.622200 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.622223 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.641707 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8knkg\" (UniqueName: \"kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg\") pod \"community-operators-8tdks\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:23 crc kubenswrapper[4888]: I1201 20:07:23.759451 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:24 crc kubenswrapper[4888]: I1201 20:07:24.326094 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:24 crc kubenswrapper[4888]: I1201 20:07:24.525507 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerStarted","Data":"f1db0af7db268c285a873f2a7baf218b0e540710c488f96eb87048ae33c6a94d"} Dec 01 20:07:25 crc kubenswrapper[4888]: I1201 20:07:25.537574 4888 generic.go:334] "Generic (PLEG): container finished" podID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerID="d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0" exitCode=0 Dec 01 20:07:25 crc kubenswrapper[4888]: I1201 20:07:25.537622 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerDied","Data":"d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0"} Dec 01 20:07:26 crc kubenswrapper[4888]: I1201 20:07:26.547993 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerStarted","Data":"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963"} Dec 01 20:07:27 crc kubenswrapper[4888]: I1201 20:07:27.559924 4888 generic.go:334] "Generic (PLEG): container finished" podID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerID="900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963" exitCode=0 Dec 01 20:07:27 crc kubenswrapper[4888]: I1201 20:07:27.559985 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerDied","Data":"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963"} Dec 01 20:07:28 crc kubenswrapper[4888]: I1201 20:07:28.570533 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerStarted","Data":"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33"} Dec 01 20:07:33 crc kubenswrapper[4888]: I1201 20:07:33.760256 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:33 crc kubenswrapper[4888]: I1201 20:07:33.761287 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:33 crc kubenswrapper[4888]: I1201 20:07:33.812895 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:33 crc kubenswrapper[4888]: I1201 20:07:33.840868 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-8tdks" podStartSLOduration=8.098108595 podStartE2EDuration="10.840833691s" podCreationTimestamp="2025-12-01 20:07:23 +0000 UTC" firstStartedPulling="2025-12-01 20:07:25.539846766 +0000 UTC m=+2045.410876700" lastFinishedPulling="2025-12-01 20:07:28.282571892 +0000 UTC m=+2048.153601796" observedRunningTime="2025-12-01 20:07:28.591699509 +0000 UTC m=+2048.462729433" watchObservedRunningTime="2025-12-01 20:07:33.840833691 +0000 UTC m=+2053.711863625" Dec 01 20:07:34 crc kubenswrapper[4888]: I1201 20:07:34.682073 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:34 crc kubenswrapper[4888]: I1201 20:07:34.739071 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:36 crc kubenswrapper[4888]: I1201 20:07:36.650705 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8tdks" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="registry-server" containerID="cri-o://2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33" gracePeriod=2 Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.588336 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.645670 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content\") pod \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.645922 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities\") pod \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.646003 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8knkg\" (UniqueName: \"kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg\") pod \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\" (UID: \"e90b55f1-53a8-411a-81e3-95a8d6be47d0\") " Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.646919 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities" (OuterVolumeSpecName: "utilities") pod "e90b55f1-53a8-411a-81e3-95a8d6be47d0" (UID: "e90b55f1-53a8-411a-81e3-95a8d6be47d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.654491 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg" (OuterVolumeSpecName: "kube-api-access-8knkg") pod "e90b55f1-53a8-411a-81e3-95a8d6be47d0" (UID: "e90b55f1-53a8-411a-81e3-95a8d6be47d0"). InnerVolumeSpecName "kube-api-access-8knkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.663290 4888 generic.go:334] "Generic (PLEG): container finished" podID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerID="2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33" exitCode=0 Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.663341 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerDied","Data":"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33"} Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.663367 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8tdks" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.663381 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8tdks" event={"ID":"e90b55f1-53a8-411a-81e3-95a8d6be47d0","Type":"ContainerDied","Data":"f1db0af7db268c285a873f2a7baf218b0e540710c488f96eb87048ae33c6a94d"} Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.663414 4888 scope.go:117] "RemoveContainer" containerID="2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.706301 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e90b55f1-53a8-411a-81e3-95a8d6be47d0" (UID: "e90b55f1-53a8-411a-81e3-95a8d6be47d0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.709487 4888 scope.go:117] "RemoveContainer" containerID="900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.727268 4888 scope.go:117] "RemoveContainer" containerID="d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.748914 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.748962 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e90b55f1-53a8-411a-81e3-95a8d6be47d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.748978 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8knkg\" (UniqueName: \"kubernetes.io/projected/e90b55f1-53a8-411a-81e3-95a8d6be47d0-kube-api-access-8knkg\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.771207 4888 scope.go:117] "RemoveContainer" containerID="2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33" Dec 01 20:07:37 crc kubenswrapper[4888]: E1201 20:07:37.771788 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33\": container with ID starting with 2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33 not found: ID does not exist" 
containerID="2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.771846 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33"} err="failed to get container status \"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33\": rpc error: code = NotFound desc = could not find container \"2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33\": container with ID starting with 2a960cac907ce8bd2f8dea44252d34b243bc088db2c7b02d59084c1d18d1ed33 not found: ID does not exist" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.771885 4888 scope.go:117] "RemoveContainer" containerID="900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963" Dec 01 20:07:37 crc kubenswrapper[4888]: E1201 20:07:37.772366 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963\": container with ID starting with 900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963 not found: ID does not exist" containerID="900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.772396 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963"} err="failed to get container status \"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963\": rpc error: code = NotFound desc = could not find container \"900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963\": container with ID starting with 900622ee6587fbd77477726b0cb33aa6a6b758fb121addb4ab9ee7e025f51963 not found: ID does not exist" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.772416 4888 scope.go:117] "RemoveContainer" containerID="d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0" Dec 01 20:07:37 crc kubenswrapper[4888]: E1201 20:07:37.772801 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0\": container with ID starting with d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0 not found: ID does not exist" containerID="d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.772826 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0"} err="failed to get container status \"d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0\": rpc error: code = NotFound desc = could not find container \"d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0\": container with ID starting with d94151333c3f371723acbd3e786fa068a5597d52be899e216bcbd3a2c9949bb0 not found: ID does not exist" Dec 01 20:07:37 crc kubenswrapper[4888]: I1201 20:07:37.994104 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:38 crc kubenswrapper[4888]: I1201 20:07:38.001165 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8tdks"] Dec 01 20:07:38 crc kubenswrapper[4888]: I1201 20:07:38.467936 
4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" path="/var/lib/kubelet/pods/e90b55f1-53a8-411a-81e3-95a8d6be47d0/volumes" Dec 01 20:07:40 crc kubenswrapper[4888]: I1201 20:07:40.690391 4888 generic.go:334] "Generic (PLEG): container finished" podID="c80b7e5e-b12e-49c1-8379-a7e33ad355fb" containerID="00c7a7b78e7b0b3f3b88dcfcc7ba87a2a687cf555b6e59c197954a66652762fc" exitCode=0 Dec 01 20:07:40 crc kubenswrapper[4888]: I1201 20:07:40.690521 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" event={"ID":"c80b7e5e-b12e-49c1-8379-a7e33ad355fb","Type":"ContainerDied","Data":"00c7a7b78e7b0b3f3b88dcfcc7ba87a2a687cf555b6e59c197954a66652762fc"} Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.138254 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.237574 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tt725"] Dec 01 20:07:42 crc kubenswrapper[4888]: E1201 20:07:42.239084 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="extract-content" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239121 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="extract-content" Dec 01 20:07:42 crc kubenswrapper[4888]: E1201 20:07:42.239161 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="registry-server" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239172 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="registry-server" Dec 01 20:07:42 crc kubenswrapper[4888]: E1201 20:07:42.239256 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="extract-utilities" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239282 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="extract-utilities" Dec 01 20:07:42 crc kubenswrapper[4888]: E1201 20:07:42.239330 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c80b7e5e-b12e-49c1-8379-a7e33ad355fb" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239347 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="c80b7e5e-b12e-49c1-8379-a7e33ad355fb" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239747 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e90b55f1-53a8-411a-81e3-95a8d6be47d0" containerName="registry-server" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.239810 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="c80b7e5e-b12e-49c1-8379-a7e33ad355fb" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.242424 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory\") pod \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\" 
(UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.242599 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key\") pod \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.242699 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w956t\" (UniqueName: \"kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t\") pod \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\" (UID: \"c80b7e5e-b12e-49c1-8379-a7e33ad355fb\") " Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.242819 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.253010 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t" (OuterVolumeSpecName: "kube-api-access-w956t") pod "c80b7e5e-b12e-49c1-8379-a7e33ad355fb" (UID: "c80b7e5e-b12e-49c1-8379-a7e33ad355fb"). InnerVolumeSpecName "kube-api-access-w956t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.258142 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tt725"] Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.289587 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory" (OuterVolumeSpecName: "inventory") pod "c80b7e5e-b12e-49c1-8379-a7e33ad355fb" (UID: "c80b7e5e-b12e-49c1-8379-a7e33ad355fb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.298550 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c80b7e5e-b12e-49c1-8379-a7e33ad355fb" (UID: "c80b7e5e-b12e-49c1-8379-a7e33ad355fb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.345606 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq4cs\" (UniqueName: \"kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.345915 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.345962 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.346126 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.346161 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.346175 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w956t\" (UniqueName: \"kubernetes.io/projected/c80b7e5e-b12e-49c1-8379-a7e33ad355fb-kube-api-access-w956t\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.448327 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.448389 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.448460 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq4cs\" (UniqueName: \"kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.449231 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content\") pod \"redhat-operators-tt725\" (UID: 
\"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.449246 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.472139 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq4cs\" (UniqueName: \"kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs\") pod \"redhat-operators-tt725\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") " pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.661060 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tt725" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.741873 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" event={"ID":"c80b7e5e-b12e-49c1-8379-a7e33ad355fb","Type":"ContainerDied","Data":"f99d32363d5cbc6f46c43da35c2ceb756cff2488e39d59134428e416291b5121"} Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.742262 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f99d32363d5cbc6f46c43da35c2ceb756cff2488e39d59134428e416291b5121" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.741965 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.866971 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k46s7"] Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.868485 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.875466 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.875688 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.875884 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.876425 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.888454 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k46s7"] Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.960061 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4xck\" (UniqueName: \"kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.960213 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:42 crc kubenswrapper[4888]: I1201 20:07:42.960432 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.063611 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4xck\" (UniqueName: \"kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.063981 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.064164 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc 
kubenswrapper[4888]: I1201 20:07:43.069026 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.071231 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.086699 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4xck\" (UniqueName: \"kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck\") pod \"ssh-known-hosts-edpm-deployment-k46s7\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") " pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.189064 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tt725"] Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.199125 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.627903 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-k46s7"] Dec 01 20:07:43 crc kubenswrapper[4888]: W1201 20:07:43.636650 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49b24356_5b0c_43f4_a3d8_0a74c3aa57d9.slice/crio-36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0 WatchSource:0}: Error finding container 36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0: Status 404 returned error can't find the container with id 36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0 Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.752557 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" event={"ID":"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9","Type":"ContainerStarted","Data":"36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0"} Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.753842 4888 generic.go:334] "Generic (PLEG): container finished" podID="950b37a6-f145-4287-8f41-174cd2540519" containerID="75dda179c36b1bb952fee828083fa063bef1cbdd9d232b4929525645bea4afc1" exitCode=0 Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.753868 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerDied","Data":"75dda179c36b1bb952fee828083fa063bef1cbdd9d232b4929525645bea4afc1"} Dec 01 20:07:43 crc kubenswrapper[4888]: I1201 20:07:43.753882 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerStarted","Data":"3860207b8dc13fe4f52c7f54296b166684f4acbad4b3cc3bb373dbff22f690c7"} Dec 01 20:07:44 crc kubenswrapper[4888]: I1201 
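The event={...} payloads in the "SyncLoop (PLEG)" entries happen to print as valid JSON, so they can be decoded with a small struct for offline analysis. A sketch; this mirrors, but is not, the kubelet's internal pleg.PodLifecycleEvent type:

```go
// Sketch: decode a PLEG event payload as logged above.
package main

import (
	"encoding/json"
	"fmt"
)

type plegEvent struct {
	ID   string `json:"ID"`   // pod UID
	Type string `json:"Type"` // e.g. ContainerStarted, ContainerDied
	Data string `json:"Data"` // container or sandbox ID
}

func main() {
	raw := `{"ID":"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9","Type":"ContainerStarted","Data":"36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0"}`
	var ev plegEvent
	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
		panic(err)
	}
	fmt.Printf("%s: pod %s, container %s\n", ev.Type, ev.ID, ev.Data)
}
```

Note the W1201 cadvisor warning just before the first ContainerStarted event: the watch fired before the new crio container was registered, and the 404 is a benign startup race, as the subsequent events show.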
Dec 01 20:07:44 crc kubenswrapper[4888]: I1201 20:07:44.767281 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" event={"ID":"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9","Type":"ContainerStarted","Data":"92f7d9c5b0e3e1f01dfbfcd321b009229fce6b7b3dba486c1cbae87d0f4b47a1"}
Dec 01 20:07:44 crc kubenswrapper[4888]: I1201 20:07:44.785379 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" podStartSLOduration=2.187822562 podStartE2EDuration="2.785354058s" podCreationTimestamp="2025-12-01 20:07:42 +0000 UTC" firstStartedPulling="2025-12-01 20:07:43.639396899 +0000 UTC m=+2063.510426813" lastFinishedPulling="2025-12-01 20:07:44.236928395 +0000 UTC m=+2064.107958309" observedRunningTime="2025-12-01 20:07:44.78192172 +0000 UTC m=+2064.652951654" watchObservedRunningTime="2025-12-01 20:07:44.785354058 +0000 UTC m=+2064.656383972"
Dec 01 20:07:45 crc kubenswrapper[4888]: I1201 20:07:45.778513 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerStarted","Data":"e1338cedb8fcd728f481737e3400976a18dbc769f77698a8a1819f2e6b9f33a7"}
Dec 01 20:07:46 crc kubenswrapper[4888]: I1201 20:07:46.787754 4888 generic.go:334] "Generic (PLEG): container finished" podID="950b37a6-f145-4287-8f41-174cd2540519" containerID="e1338cedb8fcd728f481737e3400976a18dbc769f77698a8a1819f2e6b9f33a7" exitCode=0
Dec 01 20:07:46 crc kubenswrapper[4888]: I1201 20:07:46.787815 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerDied","Data":"e1338cedb8fcd728f481737e3400976a18dbc769f77698a8a1819f2e6b9f33a7"}
Dec 01 20:07:47 crc kubenswrapper[4888]: I1201 20:07:47.799447 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerStarted","Data":"a5aaf46ce8fad6ef44f88c5b180c98ead8e81b8c588a1e6bcea9848c02daaa20"}
Dec 01 20:07:51 crc kubenswrapper[4888]: I1201 20:07:51.837317 4888 generic.go:334] "Generic (PLEG): container finished" podID="49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" containerID="92f7d9c5b0e3e1f01dfbfcd321b009229fce6b7b3dba486c1cbae87d0f4b47a1" exitCode=0
Dec 01 20:07:51 crc kubenswrapper[4888]: I1201 20:07:51.837397 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" event={"ID":"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9","Type":"ContainerDied","Data":"92f7d9c5b0e3e1f01dfbfcd321b009229fce6b7b3dba486c1cbae87d0f4b47a1"}
Dec 01 20:07:51 crc kubenswrapper[4888]: I1201 20:07:51.858998 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tt725" podStartSLOduration=6.192731965 podStartE2EDuration="9.858976545s" podCreationTimestamp="2025-12-01 20:07:42 +0000 UTC" firstStartedPulling="2025-12-01 20:07:43.755453074 +0000 UTC m=+2063.626482988" lastFinishedPulling="2025-12-01 20:07:47.421697654 +0000 UTC m=+2067.292727568" observedRunningTime="2025-12-01 20:07:47.819566131 +0000 UTC m=+2067.690596055" watchObservedRunningTime="2025-12-01 20:07:51.858976545 +0000 UTC m=+2071.730006449"
Dec 01 20:07:52 crc kubenswrapper[4888]: I1201 20:07:52.661434 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tt725"
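The "Observed pod startup duration" fields are internally consistent: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Checking with the monotonic m=+ offsets from the redhat-operators-tt725 entry above:

```go
// Sketch: verify the startup-latency arithmetic from the log's monotonic offsets.
package main

import "fmt"

func main() {
	const (
		firstStartedPulling = 2063.626482988 // m=+ offset, seconds
		lastFinishedPulling = 2067.292727568
		e2e                 = 9.858976545 // podStartE2EDuration
	)
	pull := lastFinishedPulling - firstStartedPulling
	fmt.Printf("pull window ≈ %.9fs, SLO duration ≈ %.9fs\n", pull, e2e-pull)
	// Prints ≈3.666244580s and ≈6.192731965s, matching podStartSLOduration above.
	// The same relation holds for ssh-known-hosts-edpm-deployment-k46s7
	// (2.785354058 − 0.597531496 ≈ 2.187822562).
}
```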
Dec 01 20:07:52 crc kubenswrapper[4888]: I1201 20:07:52.661902 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tt725"
Dec 01 20:07:52 crc kubenswrapper[4888]: I1201 20:07:52.709975 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tt725"
Dec 01 20:07:52 crc kubenswrapper[4888]: I1201 20:07:52.894214 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tt725"
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.031559 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tt725"]
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.335155 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7"
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.377636 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-ssh-key-openstack-edpm-ipam\") pod \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") "
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.377864 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0\") pod \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") "
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.377927 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4xck\" (UniqueName: \"kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck\") pod \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\" (UID: \"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9\") "
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.383254 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck" (OuterVolumeSpecName: "kube-api-access-m4xck") pod "49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" (UID: "49b24356-5b0c-43f4-a3d8-0a74c3aa57d9"). InnerVolumeSpecName "kube-api-access-m4xck". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.408765 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" (UID: "49b24356-5b0c-43f4-a3d8-0a74c3aa57d9"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
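The probe transitions above (startup unhealthy, then started, then readiness ready) come from startup and readiness probes on the registry-server container. The actual probe commands, ports, and thresholds are not in this log, so the exec command and numbers below are placeholders; a sketch of the shape only:

```go
// Sketch: startup/readiness probes like the ones driving "SyncLoop (probe)".
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	probe := func(cmd string) *corev1.Probe {
		return &corev1.Probe{
			ProbeHandler: corev1.ProbeHandler{
				Exec: &corev1.ExecAction{Command: []string{"/bin/sh", "-c", cmd}},
			},
			PeriodSeconds:    10, // placeholder
			FailureThreshold: 3,  // placeholder
		}
	}
	c := corev1.Container{
		Name:           "registry-server",
		StartupProbe:   probe("exit 0"), // placeholder check
		ReadinessProbe: probe("exit 0"), // placeholder check
	}
	fmt.Println(c.Name, "has startup and readiness probes configured")
}
```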
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.480363 4888 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.480449 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4xck\" (UniqueName: \"kubernetes.io/projected/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-kube-api-access-m4xck\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.480462 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/49b24356-5b0c-43f4-a3d8-0a74c3aa57d9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.857343 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" event={"ID":"49b24356-5b0c-43f4-a3d8-0a74c3aa57d9","Type":"ContainerDied","Data":"36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0"} Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.857754 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36b61a8bf3ada39182e73866aec26301866d8415d7df5e555f1a526d67927bf0" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.857363 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-k46s7" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.938729 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl"] Dec 01 20:07:53 crc kubenswrapper[4888]: E1201 20:07:53.940041 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.940078 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.941645 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="49b24356-5b0c-43f4-a3d8-0a74c3aa57d9" containerName="ssh-known-hosts-edpm-deployment" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.953361 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.956907 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.957037 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.957098 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.957109 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.977645 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl"] Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.989608 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjlmm\" (UniqueName: \"kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.989668 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:53 crc kubenswrapper[4888]: I1201 20:07:53.989728 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.091692 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.091908 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjlmm\" (UniqueName: \"kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.091945 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.103124 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.103572 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.107638 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjlmm\" (UniqueName: \"kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-nbgcl\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.281001 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.853562 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl"] Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.876037 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" event={"ID":"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a","Type":"ContainerStarted","Data":"7969a1ca5fad86a6e6bdf0b8691d5a75e0d799fb2c324e4a16ffade19963adaf"} Dec 01 20:07:54 crc kubenswrapper[4888]: I1201 20:07:54.876299 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tt725" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="registry-server" containerID="cri-o://a5aaf46ce8fad6ef44f88c5b180c98ead8e81b8c588a1e6bcea9848c02daaa20" gracePeriod=2 Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.886598 4888 generic.go:334] "Generic (PLEG): container finished" podID="950b37a6-f145-4287-8f41-174cd2540519" containerID="a5aaf46ce8fad6ef44f88c5b180c98ead8e81b8c588a1e6bcea9848c02daaa20" exitCode=0 Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.886704 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerDied","Data":"a5aaf46ce8fad6ef44f88c5b180c98ead8e81b8c588a1e6bcea9848c02daaa20"} Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.887302 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tt725" event={"ID":"950b37a6-f145-4287-8f41-174cd2540519","Type":"ContainerDied","Data":"3860207b8dc13fe4f52c7f54296b166684f4acbad4b3cc3bb373dbff22f690c7"} Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.887317 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3860207b8dc13fe4f52c7f54296b166684f4acbad4b3cc3bb373dbff22f690c7" Dec 01 20:07:55 crc kubenswrapper[4888]: 
Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.889334 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" event={"ID":"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a","Type":"ContainerStarted","Data":"1d927c8d2b49a0bfedd5c8f7066215bbfed8784f7f32d981c4d0ab62fffdb369"}
Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.922102 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" podStartSLOduration=2.295846158 podStartE2EDuration="2.922081399s" podCreationTimestamp="2025-12-01 20:07:53 +0000 UTC" firstStartedPulling="2025-12-01 20:07:54.865690858 +0000 UTC m=+2074.736720792" lastFinishedPulling="2025-12-01 20:07:55.491926119 +0000 UTC m=+2075.362956033" observedRunningTime="2025-12-01 20:07:55.914621976 +0000 UTC m=+2075.785651890" watchObservedRunningTime="2025-12-01 20:07:55.922081399 +0000 UTC m=+2075.793111313"
Dec 01 20:07:55 crc kubenswrapper[4888]: I1201 20:07:55.926540 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tt725"
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.032880 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content\") pod \"950b37a6-f145-4287-8f41-174cd2540519\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") "
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.032944 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq4cs\" (UniqueName: \"kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs\") pod \"950b37a6-f145-4287-8f41-174cd2540519\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") "
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.033141 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities\") pod \"950b37a6-f145-4287-8f41-174cd2540519\" (UID: \"950b37a6-f145-4287-8f41-174cd2540519\") "
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.033934 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities" (OuterVolumeSpecName: "utilities") pod "950b37a6-f145-4287-8f41-174cd2540519" (UID: "950b37a6-f145-4287-8f41-174cd2540519"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.038211 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs" (OuterVolumeSpecName: "kube-api-access-kq4cs") pod "950b37a6-f145-4287-8f41-174cd2540519" (UID: "950b37a6-f145-4287-8f41-174cd2540519"). InnerVolumeSpecName "kube-api-access-kq4cs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.135152 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq4cs\" (UniqueName: \"kubernetes.io/projected/950b37a6-f145-4287-8f41-174cd2540519-kube-api-access-kq4cs\") on node \"crc\" DevicePath \"\""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.135211 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.148423 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "950b37a6-f145-4287-8f41-174cd2540519" (UID: "950b37a6-f145-4287-8f41-174cd2540519"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.237106 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/950b37a6-f145-4287-8f41-174cd2540519-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.898343 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tt725"
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.934332 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tt725"]
Dec 01 20:07:56 crc kubenswrapper[4888]: I1201 20:07:56.945466 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tt725"]
Dec 01 20:07:58 crc kubenswrapper[4888]: I1201 20:07:58.481135 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="950b37a6-f145-4287-8f41-174cd2540519" path="/var/lib/kubelet/pods/950b37a6-f145-4287-8f41-174cd2540519/volumes"
Dec 01 20:08:03 crc kubenswrapper[4888]: I1201 20:08:03.963311 4888 generic.go:334] "Generic (PLEG): container finished" podID="0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" containerID="1d927c8d2b49a0bfedd5c8f7066215bbfed8784f7f32d981c4d0ab62fffdb369" exitCode=0
Dec 01 20:08:03 crc kubenswrapper[4888]: I1201 20:08:03.963393 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" event={"ID":"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a","Type":"ContainerDied","Data":"1d927c8d2b49a0bfedd5c8f7066215bbfed8784f7f32d981c4d0ab62fffdb369"}
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.428150 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key\") pod \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.428258 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjlmm\" (UniqueName: \"kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm\") pod \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.428387 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory\") pod \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\" (UID: \"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a\") " Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.435712 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm" (OuterVolumeSpecName: "kube-api-access-mjlmm") pod "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" (UID: "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a"). InnerVolumeSpecName "kube-api-access-mjlmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.457450 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" (UID: "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.463347 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory" (OuterVolumeSpecName: "inventory") pod "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" (UID: "0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.530120 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjlmm\" (UniqueName: \"kubernetes.io/projected/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-kube-api-access-mjlmm\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.530149 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.530158 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.984769 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" event={"ID":"0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a","Type":"ContainerDied","Data":"7969a1ca5fad86a6e6bdf0b8691d5a75e0d799fb2c324e4a16ffade19963adaf"} Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.984816 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7969a1ca5fad86a6e6bdf0b8691d5a75e0d799fb2c324e4a16ffade19963adaf" Dec 01 20:08:05 crc kubenswrapper[4888]: I1201 20:08:05.984823 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-nbgcl" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.048666 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm"] Dec 01 20:08:06 crc kubenswrapper[4888]: E1201 20:08:06.049079 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="extract-utilities" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049101 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="extract-utilities" Dec 01 20:08:06 crc kubenswrapper[4888]: E1201 20:08:06.049129 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049139 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:06 crc kubenswrapper[4888]: E1201 20:08:06.049150 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="registry-server" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049157 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="registry-server" Dec 01 20:08:06 crc kubenswrapper[4888]: E1201 20:08:06.049167 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="extract-content" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049173 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="extract-content" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049380 4888 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="950b37a6-f145-4287-8f41-174cd2540519" containerName="registry-server" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.049398 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.050065 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.054743 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.055082 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.055294 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.055520 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.064116 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm"] Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.143207 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5zs5\" (UniqueName: \"kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.143566 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.143716 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.245892 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5zs5\" (UniqueName: \"kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.245953 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: 
\"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.246004 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.251009 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.251888 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.262272 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5zs5\" (UniqueName: \"kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.368856 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.871525 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm"] Dec 01 20:08:06 crc kubenswrapper[4888]: I1201 20:08:06.993745 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" event={"ID":"f6a86735-753a-4ef6-9e99-5394105fcff0","Type":"ContainerStarted","Data":"b01d86c35d2ec782525419df9c944ba40e750bf70aaf8753c41b689018d76bd5"} Dec 01 20:08:08 crc kubenswrapper[4888]: I1201 20:08:08.004326 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" event={"ID":"f6a86735-753a-4ef6-9e99-5394105fcff0","Type":"ContainerStarted","Data":"b8317b8d2fa289dd2480b38bddb2f13bf36c1f264070473418212e4615e096c7"} Dec 01 20:08:08 crc kubenswrapper[4888]: I1201 20:08:08.022870 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" podStartSLOduration=1.413533604 podStartE2EDuration="2.022841651s" podCreationTimestamp="2025-12-01 20:08:06 +0000 UTC" firstStartedPulling="2025-12-01 20:08:06.876627154 +0000 UTC m=+2086.747657068" lastFinishedPulling="2025-12-01 20:08:07.485935201 +0000 UTC m=+2087.356965115" observedRunningTime="2025-12-01 20:08:08.018657211 +0000 UTC m=+2087.889687145" watchObservedRunningTime="2025-12-01 20:08:08.022841651 +0000 UTC m=+2087.893871575" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.314432 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.316896 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.337930 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.375827 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.375916 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpsxz\" (UniqueName: \"kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.376021 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.477815 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpsxz\" (UniqueName: \"kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.479652 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.480148 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.481222 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.481699 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.508201 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mpsxz\" (UniqueName: \"kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz\") pod \"redhat-marketplace-92dbp\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:13 crc kubenswrapper[4888]: I1201 20:08:13.655481 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:14 crc kubenswrapper[4888]: I1201 20:08:14.077602 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:15 crc kubenswrapper[4888]: I1201 20:08:15.077048 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerID="3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff" exitCode=0 Dec 01 20:08:15 crc kubenswrapper[4888]: I1201 20:08:15.077126 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerDied","Data":"3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff"} Dec 01 20:08:15 crc kubenswrapper[4888]: I1201 20:08:15.077387 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerStarted","Data":"37ae5847cac2ac615a0591cf706db02a16675f62e0d4d60b180eebaa0dfd533a"} Dec 01 20:08:17 crc kubenswrapper[4888]: I1201 20:08:17.098687 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerID="164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78" exitCode=0 Dec 01 20:08:17 crc kubenswrapper[4888]: I1201 20:08:17.098764 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerDied","Data":"164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78"} Dec 01 20:08:18 crc kubenswrapper[4888]: I1201 20:08:18.107341 4888 generic.go:334] "Generic (PLEG): container finished" podID="f6a86735-753a-4ef6-9e99-5394105fcff0" containerID="b8317b8d2fa289dd2480b38bddb2f13bf36c1f264070473418212e4615e096c7" exitCode=0 Dec 01 20:08:18 crc kubenswrapper[4888]: I1201 20:08:18.107406 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" event={"ID":"f6a86735-753a-4ef6-9e99-5394105fcff0","Type":"ContainerDied","Data":"b8317b8d2fa289dd2480b38bddb2f13bf36c1f264070473418212e4615e096c7"} Dec 01 20:08:18 crc kubenswrapper[4888]: I1201 20:08:18.110805 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerStarted","Data":"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371"} Dec 01 20:08:18 crc kubenswrapper[4888]: I1201 20:08:18.139598 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-92dbp" podStartSLOduration=2.429417012 podStartE2EDuration="5.13957341s" podCreationTimestamp="2025-12-01 20:08:13 +0000 UTC" firstStartedPulling="2025-12-01 20:08:15.080101662 +0000 UTC m=+2094.951131606" lastFinishedPulling="2025-12-01 20:08:17.79025809 +0000 UTC m=+2097.661288004" observedRunningTime="2025-12-01 20:08:18.13784802 
+0000 UTC m=+2098.008877934" watchObservedRunningTime="2025-12-01 20:08:18.13957341 +0000 UTC m=+2098.010603314" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.546566 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.607832 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory\") pod \"f6a86735-753a-4ef6-9e99-5394105fcff0\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.608001 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5zs5\" (UniqueName: \"kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5\") pod \"f6a86735-753a-4ef6-9e99-5394105fcff0\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.608733 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key\") pod \"f6a86735-753a-4ef6-9e99-5394105fcff0\" (UID: \"f6a86735-753a-4ef6-9e99-5394105fcff0\") " Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.612940 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5" (OuterVolumeSpecName: "kube-api-access-r5zs5") pod "f6a86735-753a-4ef6-9e99-5394105fcff0" (UID: "f6a86735-753a-4ef6-9e99-5394105fcff0"). InnerVolumeSpecName "kube-api-access-r5zs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.634313 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory" (OuterVolumeSpecName: "inventory") pod "f6a86735-753a-4ef6-9e99-5394105fcff0" (UID: "f6a86735-753a-4ef6-9e99-5394105fcff0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.635382 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f6a86735-753a-4ef6-9e99-5394105fcff0" (UID: "f6a86735-753a-4ef6-9e99-5394105fcff0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.711040 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.711075 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5zs5\" (UniqueName: \"kubernetes.io/projected/f6a86735-753a-4ef6-9e99-5394105fcff0-kube-api-access-r5zs5\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:19 crc kubenswrapper[4888]: I1201 20:08:19.711085 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6a86735-753a-4ef6-9e99-5394105fcff0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.128829 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" event={"ID":"f6a86735-753a-4ef6-9e99-5394105fcff0","Type":"ContainerDied","Data":"b01d86c35d2ec782525419df9c944ba40e750bf70aaf8753c41b689018d76bd5"} Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.128882 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b01d86c35d2ec782525419df9c944ba40e750bf70aaf8753c41b689018d76bd5" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.129078 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.206768 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz"] Dec 01 20:08:20 crc kubenswrapper[4888]: E1201 20:08:20.207222 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a86735-753a-4ef6-9e99-5394105fcff0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.207241 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a86735-753a-4ef6-9e99-5394105fcff0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.207459 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6a86735-753a-4ef6-9e99-5394105fcff0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.208070 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210310 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210633 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210659 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210669 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210739 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.210845 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.211596 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.211667 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.225291 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz"] Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.320963 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321025 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321165 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321269 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321305 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321401 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321433 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321464 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmt9q\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321486 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321595 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321675 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321735 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321874 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.321956 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423215 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423267 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423321 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423357 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc 
kubenswrapper[4888]: I1201 20:08:20.423404 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423436 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423464 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423503 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423528 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423549 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmt9q\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423601 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423649 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423694 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.423729 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.424906 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.425984 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.426864 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.427197 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.428041 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.428492 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.428986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.428802 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.430056 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.430264 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.431205 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.436624 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.441616 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.448006 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.448224 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.448337 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.448779 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.453748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.460041 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmt9q\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.461399 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.526525 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:08:20 crc kubenswrapper[4888]: I1201 20:08:20.534863 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:08:21 crc kubenswrapper[4888]: I1201 20:08:21.093484 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz"] Dec 01 20:08:21 crc kubenswrapper[4888]: W1201 20:08:21.096478 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cc88e76_38aa_4d88_97e3_2d9829760fdf.slice/crio-c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a WatchSource:0}: Error finding container c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a: Status 404 returned error can't find the container with id c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a Dec 01 20:08:21 crc kubenswrapper[4888]: I1201 20:08:21.137290 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" event={"ID":"2cc88e76-38aa-4d88-97e3-2d9829760fdf","Type":"ContainerStarted","Data":"c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a"} Dec 01 20:08:21 crc kubenswrapper[4888]: I1201 20:08:21.925170 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:08:23 crc kubenswrapper[4888]: I1201 20:08:23.156779 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" event={"ID":"2cc88e76-38aa-4d88-97e3-2d9829760fdf","Type":"ContainerStarted","Data":"041d02fd7db1bbe98aedfe36cddd343547f9f0329d24e0b99ee198057107142b"} Dec 01 20:08:23 crc kubenswrapper[4888]: I1201 20:08:23.181674 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" podStartSLOduration=2.361919167 podStartE2EDuration="3.181651086s" podCreationTimestamp="2025-12-01 20:08:20 +0000 UTC" firstStartedPulling="2025-12-01 20:08:21.100361856 +0000 UTC m=+2100.971391770" lastFinishedPulling="2025-12-01 20:08:21.920093775 +0000 UTC m=+2101.791123689" observedRunningTime="2025-12-01 20:08:23.17267192 +0000 UTC m=+2103.043701844" watchObservedRunningTime="2025-12-01 20:08:23.181651086 +0000 UTC m=+2103.052681000" Dec 01 20:08:23 crc kubenswrapper[4888]: I1201 20:08:23.655848 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:23 crc kubenswrapper[4888]: I1201 20:08:23.656075 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:23 crc kubenswrapper[4888]: I1201 20:08:23.738119 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:24 crc kubenswrapper[4888]: I1201 20:08:24.212964 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:24 crc kubenswrapper[4888]: I1201 20:08:24.256707 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.182364 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-92dbp" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="registry-server" 
containerID="cri-o://2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371" gracePeriod=2 Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.612543 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.664879 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities\") pod \"fc0ac256-73ac-4b82-b300-324a594e6eac\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.664948 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpsxz\" (UniqueName: \"kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz\") pod \"fc0ac256-73ac-4b82-b300-324a594e6eac\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.665149 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content\") pod \"fc0ac256-73ac-4b82-b300-324a594e6eac\" (UID: \"fc0ac256-73ac-4b82-b300-324a594e6eac\") " Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.666759 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities" (OuterVolumeSpecName: "utilities") pod "fc0ac256-73ac-4b82-b300-324a594e6eac" (UID: "fc0ac256-73ac-4b82-b300-324a594e6eac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.673901 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz" (OuterVolumeSpecName: "kube-api-access-mpsxz") pod "fc0ac256-73ac-4b82-b300-324a594e6eac" (UID: "fc0ac256-73ac-4b82-b300-324a594e6eac"). InnerVolumeSpecName "kube-api-access-mpsxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.688363 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc0ac256-73ac-4b82-b300-324a594e6eac" (UID: "fc0ac256-73ac-4b82-b300-324a594e6eac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.768524 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.768840 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc0ac256-73ac-4b82-b300-324a594e6eac-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:26 crc kubenswrapper[4888]: I1201 20:08:26.768911 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpsxz\" (UniqueName: \"kubernetes.io/projected/fc0ac256-73ac-4b82-b300-324a594e6eac-kube-api-access-mpsxz\") on node \"crc\" DevicePath \"\"" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.194422 4888 generic.go:334] "Generic (PLEG): container finished" podID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerID="2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371" exitCode=0 Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.194486 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerDied","Data":"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371"} Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.194498 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-92dbp" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.194538 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-92dbp" event={"ID":"fc0ac256-73ac-4b82-b300-324a594e6eac","Type":"ContainerDied","Data":"37ae5847cac2ac615a0591cf706db02a16675f62e0d4d60b180eebaa0dfd533a"} Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.194557 4888 scope.go:117] "RemoveContainer" containerID="2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.221079 4888 scope.go:117] "RemoveContainer" containerID="164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.230370 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.242814 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-92dbp"] Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.263272 4888 scope.go:117] "RemoveContainer" containerID="3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.307171 4888 scope.go:117] "RemoveContainer" containerID="2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371" Dec 01 20:08:27 crc kubenswrapper[4888]: E1201 20:08:27.308067 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371\": container with ID starting with 2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371 not found: ID does not exist" containerID="2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.308120 4888 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371"} err="failed to get container status \"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371\": rpc error: code = NotFound desc = could not find container \"2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371\": container with ID starting with 2c1017eaeffd4deebb1e8109bba1c91bc033197a358cbdd77121501897e4a371 not found: ID does not exist" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.308157 4888 scope.go:117] "RemoveContainer" containerID="164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78" Dec 01 20:08:27 crc kubenswrapper[4888]: E1201 20:08:27.308676 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78\": container with ID starting with 164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78 not found: ID does not exist" containerID="164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.308708 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78"} err="failed to get container status \"164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78\": rpc error: code = NotFound desc = could not find container \"164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78\": container with ID starting with 164dcd1a75e77f76cb625d1398dd080fcebbd6049291db3eb36018d2d360fc78 not found: ID does not exist" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.308738 4888 scope.go:117] "RemoveContainer" containerID="3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff" Dec 01 20:08:27 crc kubenswrapper[4888]: E1201 20:08:27.309001 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff\": container with ID starting with 3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff not found: ID does not exist" containerID="3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff" Dec 01 20:08:27 crc kubenswrapper[4888]: I1201 20:08:27.309037 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff"} err="failed to get container status \"3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff\": rpc error: code = NotFound desc = could not find container \"3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff\": container with ID starting with 3d34395ce0041a0d95ee81109c15692d0629827901d4e2e4a0a2662422d5ddff not found: ID does not exist" Dec 01 20:08:28 crc kubenswrapper[4888]: I1201 20:08:28.463094 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" path="/var/lib/kubelet/pods/fc0ac256-73ac-4b82-b300-324a594e6eac/volumes" Dec 01 20:08:50 crc kubenswrapper[4888]: I1201 20:08:50.037747 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:08:50 crc kubenswrapper[4888]: I1201 20:08:50.038795 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:09:03 crc kubenswrapper[4888]: I1201 20:09:03.512267 4888 generic.go:334] "Generic (PLEG): container finished" podID="2cc88e76-38aa-4d88-97e3-2d9829760fdf" containerID="041d02fd7db1bbe98aedfe36cddd343547f9f0329d24e0b99ee198057107142b" exitCode=0 Dec 01 20:09:03 crc kubenswrapper[4888]: I1201 20:09:03.512369 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" event={"ID":"2cc88e76-38aa-4d88-97e3-2d9829760fdf","Type":"ContainerDied","Data":"041d02fd7db1bbe98aedfe36cddd343547f9f0329d24e0b99ee198057107142b"} Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.912264 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919569 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919631 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919701 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919751 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmt9q\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919793 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919852 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc 
kubenswrapper[4888]: I1201 20:09:04.919885 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919920 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.919960 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.920033 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.920086 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.920117 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.920159 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.920213 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle\") pod \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\" (UID: \"2cc88e76-38aa-4d88-97e3-2d9829760fdf\") " Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.926908 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.927038 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.927085 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.928438 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q" (OuterVolumeSpecName: "kube-api-access-rmt9q") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "kube-api-access-rmt9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.935220 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.935543 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.943584 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.944651 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.944762 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.944877 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.944903 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.954122 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.962394 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory" (OuterVolumeSpecName: "inventory") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:04 crc kubenswrapper[4888]: I1201 20:09:04.966835 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2cc88e76-38aa-4d88-97e3-2d9829760fdf" (UID: "2cc88e76-38aa-4d88-97e3-2d9829760fdf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022438 4888 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022482 4888 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022494 4888 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022506 4888 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022519 4888 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022532 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022543 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022558 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022571 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmt9q\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-kube-api-access-rmt9q\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022586 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022601 4888 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022613 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022625 4888 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cc88e76-38aa-4d88-97e3-2d9829760fdf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.022639 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/2cc88e76-38aa-4d88-97e3-2d9829760fdf-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.533861 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" event={"ID":"2cc88e76-38aa-4d88-97e3-2d9829760fdf","Type":"ContainerDied","Data":"c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a"} Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.534322 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9eb1606885932e10473d880edf614b20233d04af15bd3068d1efee5027f003a" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.533948 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.656651 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r"] Dec 01 20:09:05 crc kubenswrapper[4888]: E1201 20:09:05.657084 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="extract-content" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657120 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="extract-content" Dec 01 20:09:05 crc kubenswrapper[4888]: E1201 20:09:05.657146 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cc88e76-38aa-4d88-97e3-2d9829760fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657158 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cc88e76-38aa-4d88-97e3-2d9829760fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 20:09:05 crc kubenswrapper[4888]: E1201 20:09:05.657206 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="extract-utilities" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657215 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="extract-utilities" Dec 01 20:09:05 crc kubenswrapper[4888]: E1201 20:09:05.657225 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="registry-server" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657233 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" containerName="registry-server" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657485 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc0ac256-73ac-4b82-b300-324a594e6eac" 
containerName="registry-server" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.657515 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cc88e76-38aa-4d88-97e3-2d9829760fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.658256 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.661429 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.661533 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.661559 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.661948 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.662740 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.673328 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r"] Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.742307 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrlxw\" (UniqueName: \"kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.742369 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.742472 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.742527 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.742584 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.846150 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.846252 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.846321 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.846484 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrlxw\" (UniqueName: \"kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.846577 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.848383 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.851771 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.851978 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.857878 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:05 crc kubenswrapper[4888]: I1201 20:09:05.862986 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrlxw\" (UniqueName: \"kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-rtj2r\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:06 crc kubenswrapper[4888]: I1201 20:09:06.028208 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:09:06 crc kubenswrapper[4888]: I1201 20:09:06.557255 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r"] Dec 01 20:09:07 crc kubenswrapper[4888]: I1201 20:09:07.551785 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" event={"ID":"2e35aaa2-7b44-48c2-b94d-46f753c5698f","Type":"ContainerStarted","Data":"4b8659fe72d73922a84b323908a4de583cdcaa4db7c1bc83ba40e8f65d50b362"} Dec 01 20:09:07 crc kubenswrapper[4888]: I1201 20:09:07.552366 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" event={"ID":"2e35aaa2-7b44-48c2-b94d-46f753c5698f","Type":"ContainerStarted","Data":"ba8d8c96c38286fcdf60130ac14ece6d6fa8d09b6e07b9a1cde4cbdf4dfcc7e6"} Dec 01 20:09:07 crc kubenswrapper[4888]: I1201 20:09:07.577327 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" podStartSLOduration=2.002045618 podStartE2EDuration="2.577161509s" podCreationTimestamp="2025-12-01 20:09:05 +0000 UTC" firstStartedPulling="2025-12-01 20:09:06.566705671 +0000 UTC m=+2146.437735585" lastFinishedPulling="2025-12-01 20:09:07.141821562 +0000 UTC m=+2147.012851476" observedRunningTime="2025-12-01 20:09:07.569631614 +0000 UTC m=+2147.440661528" watchObservedRunningTime="2025-12-01 20:09:07.577161509 +0000 UTC m=+2147.448191433" Dec 01 20:09:20 crc kubenswrapper[4888]: I1201 20:09:20.038028 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:09:20 crc kubenswrapper[4888]: I1201 20:09:20.038507 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.037760 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.038176 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.038252 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.038941 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.038996 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" gracePeriod=600 Dec 01 20:09:50 crc kubenswrapper[4888]: E1201 20:09:50.164445 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.967023 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" exitCode=0 Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.967078 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3"} Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.967384 4888 scope.go:117] "RemoveContainer" containerID="18e67e9eb2c7daf55c7d4585721c7b3dddacba13d97cde03e2880c670e9ee89c" Dec 01 20:09:50 crc kubenswrapper[4888]: I1201 20:09:50.968347 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:09:50 crc kubenswrapper[4888]: E1201 20:09:50.968898 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" 
Dec 01 20:10:05 crc kubenswrapper[4888]: I1201 20:10:05.452086 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:10:05 crc kubenswrapper[4888]: E1201 20:10:05.454211 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:10:13 crc kubenswrapper[4888]: I1201 20:10:13.178852 4888 generic.go:334] "Generic (PLEG): container finished" podID="2e35aaa2-7b44-48c2-b94d-46f753c5698f" containerID="4b8659fe72d73922a84b323908a4de583cdcaa4db7c1bc83ba40e8f65d50b362" exitCode=0 Dec 01 20:10:13 crc kubenswrapper[4888]: I1201 20:10:13.179453 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" event={"ID":"2e35aaa2-7b44-48c2-b94d-46f753c5698f","Type":"ContainerDied","Data":"4b8659fe72d73922a84b323908a4de583cdcaa4db7c1bc83ba40e8f65d50b362"} Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.611373 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.687635 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory\") pod \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.687720 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrlxw\" (UniqueName: \"kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw\") pod \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.687757 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0\") pod \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.687782 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key\") pod \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.687801 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle\") pod \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\" (UID: \"2e35aaa2-7b44-48c2-b94d-46f753c5698f\") " Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.693234 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw" (OuterVolumeSpecName: "kube-api-access-xrlxw") pod 
"2e35aaa2-7b44-48c2-b94d-46f753c5698f" (UID: "2e35aaa2-7b44-48c2-b94d-46f753c5698f"). InnerVolumeSpecName "kube-api-access-xrlxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.693889 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2e35aaa2-7b44-48c2-b94d-46f753c5698f" (UID: "2e35aaa2-7b44-48c2-b94d-46f753c5698f"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.715420 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2e35aaa2-7b44-48c2-b94d-46f753c5698f" (UID: "2e35aaa2-7b44-48c2-b94d-46f753c5698f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.717121 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2e35aaa2-7b44-48c2-b94d-46f753c5698f" (UID: "2e35aaa2-7b44-48c2-b94d-46f753c5698f"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.743533 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory" (OuterVolumeSpecName: "inventory") pod "2e35aaa2-7b44-48c2-b94d-46f753c5698f" (UID: "2e35aaa2-7b44-48c2-b94d-46f753c5698f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.790214 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.790249 4888 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.790263 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2e35aaa2-7b44-48c2-b94d-46f753c5698f-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.790276 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrlxw\" (UniqueName: \"kubernetes.io/projected/2e35aaa2-7b44-48c2-b94d-46f753c5698f-kube-api-access-xrlxw\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:14 crc kubenswrapper[4888]: I1201 20:10:14.790289 4888 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2e35aaa2-7b44-48c2-b94d-46f753c5698f-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.197909 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" event={"ID":"2e35aaa2-7b44-48c2-b94d-46f753c5698f","Type":"ContainerDied","Data":"ba8d8c96c38286fcdf60130ac14ece6d6fa8d09b6e07b9a1cde4cbdf4dfcc7e6"} Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.198216 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba8d8c96c38286fcdf60130ac14ece6d6fa8d09b6e07b9a1cde4cbdf4dfcc7e6" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.197996 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-rtj2r" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.376012 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m"] Dec 01 20:10:15 crc kubenswrapper[4888]: E1201 20:10:15.376525 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e35aaa2-7b44-48c2-b94d-46f753c5698f" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.376547 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e35aaa2-7b44-48c2-b94d-46f753c5698f" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.376800 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e35aaa2-7b44-48c2-b94d-46f753c5698f" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.377573 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.382855 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.382905 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.383049 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.383241 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.383911 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.394159 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.401418 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m"] Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501544 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501585 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501624 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501642 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501661 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.501880 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzwpt\" (UniqueName: \"kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604415 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604467 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604496 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604516 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604536 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.604570 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzwpt\" (UniqueName: \"kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.608790 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.608954 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.609026 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.609453 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.612323 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.621268 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzwpt\" (UniqueName: \"kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:15 crc kubenswrapper[4888]: I1201 20:10:15.693999 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:10:16 crc kubenswrapper[4888]: I1201 20:10:16.226121 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m"] Dec 01 20:10:16 crc kubenswrapper[4888]: W1201 20:10:16.226367 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1abae65_0fe4_4a5c_afa8_824894f56643.slice/crio-3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b WatchSource:0}: Error finding container 3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b: Status 404 returned error can't find the container with id 3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b Dec 01 20:10:17 crc kubenswrapper[4888]: I1201 20:10:17.217803 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" event={"ID":"b1abae65-0fe4-4a5c-afa8-824894f56643","Type":"ContainerStarted","Data":"7548ed0a0aa5a46e6516e6ba686b0775b3d2ee0c3b74f5acf8c49f74d921891f"} Dec 01 20:10:17 crc kubenswrapper[4888]: I1201 20:10:17.218167 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" event={"ID":"b1abae65-0fe4-4a5c-afa8-824894f56643","Type":"ContainerStarted","Data":"3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b"} Dec 01 20:10:17 crc kubenswrapper[4888]: I1201 20:10:17.240290 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" podStartSLOduration=1.7498044130000001 podStartE2EDuration="2.240269611s" podCreationTimestamp="2025-12-01 20:10:15 +0000 UTC" firstStartedPulling="2025-12-01 20:10:16.228133263 +0000 UTC m=+2216.099163167" lastFinishedPulling="2025-12-01 20:10:16.718598451 +0000 UTC m=+2216.589628365" observedRunningTime="2025-12-01 20:10:17.233633761 +0000 UTC m=+2217.104663675" watchObservedRunningTime="2025-12-01 20:10:17.240269611 +0000 UTC m=+2217.111299525" Dec 01 20:10:18 crc kubenswrapper[4888]: I1201 20:10:18.451979 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:10:18 crc kubenswrapper[4888]: E1201 20:10:18.452259 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:10:33 crc kubenswrapper[4888]: I1201 20:10:33.451282 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:10:33 crc kubenswrapper[4888]: E1201 20:10:33.451952 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:10:44 crc 
kubenswrapper[4888]: I1201 20:10:44.451889 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:10:44 crc kubenswrapper[4888]: E1201 20:10:44.452800 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.831607 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.834482 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.852854 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.852967 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.853013 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkh2g\" (UniqueName: \"kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.853301 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.954292 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.954350 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkh2g\" (UniqueName: \"kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.954438 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content\") pod \"certified-operators-fgxlw\" (UID: 
\"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.954847 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.954883 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:52 crc kubenswrapper[4888]: I1201 20:10:52.973620 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkh2g\" (UniqueName: \"kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g\") pod \"certified-operators-fgxlw\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:53 crc kubenswrapper[4888]: I1201 20:10:53.154520 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:10:53 crc kubenswrapper[4888]: I1201 20:10:53.693970 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:10:53 crc kubenswrapper[4888]: I1201 20:10:53.733580 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerStarted","Data":"60922ec2b1af672b15d992d02f84c69687d7a0cf541c730e690b34653a560817"} Dec 01 20:10:54 crc kubenswrapper[4888]: I1201 20:10:54.746202 4888 generic.go:334] "Generic (PLEG): container finished" podID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerID="9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f" exitCode=0 Dec 01 20:10:54 crc kubenswrapper[4888]: I1201 20:10:54.746306 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerDied","Data":"9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f"} Dec 01 20:10:56 crc kubenswrapper[4888]: I1201 20:10:56.452066 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:10:56 crc kubenswrapper[4888]: E1201 20:10:56.452583 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:10:56 crc kubenswrapper[4888]: I1201 20:10:56.764911 4888 generic.go:334] "Generic (PLEG): container finished" podID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerID="fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520" exitCode=0 Dec 01 20:10:56 crc kubenswrapper[4888]: I1201 20:10:56.764955 4888 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerDied","Data":"fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520"} Dec 01 20:10:57 crc kubenswrapper[4888]: I1201 20:10:57.779295 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerStarted","Data":"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5"} Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.154905 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.157390 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.209269 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.239967 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fgxlw" podStartSLOduration=8.564708248 podStartE2EDuration="11.239940979s" podCreationTimestamp="2025-12-01 20:10:52 +0000 UTC" firstStartedPulling="2025-12-01 20:10:54.748478023 +0000 UTC m=+2254.619507937" lastFinishedPulling="2025-12-01 20:10:57.423710764 +0000 UTC m=+2257.294740668" observedRunningTime="2025-12-01 20:10:57.800462179 +0000 UTC m=+2257.671492093" watchObservedRunningTime="2025-12-01 20:11:03.239940979 +0000 UTC m=+2263.110970893" Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.879336 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:03 crc kubenswrapper[4888]: I1201 20:11:03.942201 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:11:05 crc kubenswrapper[4888]: I1201 20:11:05.847050 4888 generic.go:334] "Generic (PLEG): container finished" podID="b1abae65-0fe4-4a5c-afa8-824894f56643" containerID="7548ed0a0aa5a46e6516e6ba686b0775b3d2ee0c3b74f5acf8c49f74d921891f" exitCode=0 Dec 01 20:11:05 crc kubenswrapper[4888]: I1201 20:11:05.847135 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" event={"ID":"b1abae65-0fe4-4a5c-afa8-824894f56643","Type":"ContainerDied","Data":"7548ed0a0aa5a46e6516e6ba686b0775b3d2ee0c3b74f5acf8c49f74d921891f"} Dec 01 20:11:05 crc kubenswrapper[4888]: I1201 20:11:05.847698 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fgxlw" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="registry-server" containerID="cri-o://a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5" gracePeriod=2 Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.397280 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.418689 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkh2g\" (UniqueName: \"kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g\") pod \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.418903 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content\") pod \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.418940 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities\") pod \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\" (UID: \"e51dff1c-acf7-4b97-be49-cd9c4aa809c6\") " Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.420417 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities" (OuterVolumeSpecName: "utilities") pod "e51dff1c-acf7-4b97-be49-cd9c4aa809c6" (UID: "e51dff1c-acf7-4b97-be49-cd9c4aa809c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.435719 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g" (OuterVolumeSpecName: "kube-api-access-zkh2g") pod "e51dff1c-acf7-4b97-be49-cd9c4aa809c6" (UID: "e51dff1c-acf7-4b97-be49-cd9c4aa809c6"). InnerVolumeSpecName "kube-api-access-zkh2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.521869 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkh2g\" (UniqueName: \"kubernetes.io/projected/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-kube-api-access-zkh2g\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.522447 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.864539 4888 generic.go:334] "Generic (PLEG): container finished" podID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerID="a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5" exitCode=0 Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.864632 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerDied","Data":"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5"} Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.864680 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fgxlw" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.864732 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgxlw" event={"ID":"e51dff1c-acf7-4b97-be49-cd9c4aa809c6","Type":"ContainerDied","Data":"60922ec2b1af672b15d992d02f84c69687d7a0cf541c730e690b34653a560817"} Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.864775 4888 scope.go:117] "RemoveContainer" containerID="a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.873909 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e51dff1c-acf7-4b97-be49-cd9c4aa809c6" (UID: "e51dff1c-acf7-4b97-be49-cd9c4aa809c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.911465 4888 scope.go:117] "RemoveContainer" containerID="fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.932720 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51dff1c-acf7-4b97-be49-cd9c4aa809c6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:06 crc kubenswrapper[4888]: I1201 20:11:06.933161 4888 scope.go:117] "RemoveContainer" containerID="9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.014352 4888 scope.go:117] "RemoveContainer" containerID="a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.015721 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5\": container with ID starting with a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5 not found: ID does not exist" containerID="a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.015747 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5"} err="failed to get container status \"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5\": rpc error: code = NotFound desc = could not find container \"a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5\": container with ID starting with a120edfd22c9d51cd8112d47c6a890045cbfce48a8eec075a4ae5d420e7f4fa5 not found: ID does not exist" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.015766 4888 scope.go:117] "RemoveContainer" containerID="fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.015983 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520\": container with ID starting with fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520 not found: ID does not exist" containerID="fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520" Dec 01 20:11:07 crc 
kubenswrapper[4888]: I1201 20:11:07.016009 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520"} err="failed to get container status \"fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520\": rpc error: code = NotFound desc = could not find container \"fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520\": container with ID starting with fc6a9d15255dff97903887c2669c393ef8b57689293c1aacca6c8080c25f1520 not found: ID does not exist" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.016026 4888 scope.go:117] "RemoveContainer" containerID="9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.016225 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f\": container with ID starting with 9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f not found: ID does not exist" containerID="9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.016242 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f"} err="failed to get container status \"9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f\": rpc error: code = NotFound desc = could not find container \"9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f\": container with ID starting with 9bf31edeebf129bc2eccbc4b5efcab53d8387d3aee26819efb282c7c9c8df93f not found: ID does not exist" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.210060 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.221722 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fgxlw"] Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.400318 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.445652 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzwpt\" (UniqueName: \"kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.445769 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.445881 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.445937 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.446088 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.446142 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory\") pod \"b1abae65-0fe4-4a5c-afa8-824894f56643\" (UID: \"b1abae65-0fe4-4a5c-afa8-824894f56643\") " Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.453429 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.453972 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt" (OuterVolumeSpecName: "kube-api-access-vzwpt") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "kube-api-access-vzwpt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.475753 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.477246 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory" (OuterVolumeSpecName: "inventory") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.479755 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.494535 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "b1abae65-0fe4-4a5c-afa8-824894f56643" (UID: "b1abae65-0fe4-4a5c-afa8-824894f56643"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556607 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzwpt\" (UniqueName: \"kubernetes.io/projected/b1abae65-0fe4-4a5c-afa8-824894f56643-kube-api-access-vzwpt\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556672 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556699 4888 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556721 4888 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556737 4888 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.556751 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b1abae65-0fe4-4a5c-afa8-824894f56643-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.875633 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.875697 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m" event={"ID":"b1abae65-0fe4-4a5c-afa8-824894f56643","Type":"ContainerDied","Data":"3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b"} Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.875862 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bd155ab3e59fb62fbb915140303e569bdf544879637976f68baf708932b2c1b" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.990560 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8"] Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.990955 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="extract-utilities" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.990971 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="extract-utilities" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.990985 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="extract-content" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.990991 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="extract-content" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.991006 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="registry-server" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.991012 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="registry-server" Dec 01 20:11:07 crc kubenswrapper[4888]: E1201 20:11:07.991033 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1abae65-0fe4-4a5c-afa8-824894f56643" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.991040 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1abae65-0fe4-4a5c-afa8-824894f56643" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.991204 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" containerName="registry-server" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.991230 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1abae65-0fe4-4a5c-afa8-824894f56643" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.991841 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.994263 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.994888 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.995251 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.995435 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:11:07 crc kubenswrapper[4888]: I1201 20:11:07.997161 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.005431 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8"] Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.065053 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.065244 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.065295 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sxjd\" (UniqueName: \"kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.065386 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.065436 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.167515 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.167582 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.167687 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.167749 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.167773 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sxjd\" (UniqueName: \"kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.172674 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.173295 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.173390 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.174814 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.183667 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sxjd\" (UniqueName: \"kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.323363 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.452383 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:11:08 crc kubenswrapper[4888]: E1201 20:11:08.453375 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.469539 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e51dff1c-acf7-4b97-be49-cd9c4aa809c6" path="/var/lib/kubelet/pods/e51dff1c-acf7-4b97-be49-cd9c4aa809c6/volumes" Dec 01 20:11:08 crc kubenswrapper[4888]: I1201 20:11:08.886916 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8"] Dec 01 20:11:08 crc kubenswrapper[4888]: W1201 20:11:08.896270 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5293702_c5a9_442d_b776_bed869af0d5d.slice/crio-7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86 WatchSource:0}: Error finding container 7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86: Status 404 returned error can't find the container with id 7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86 Dec 01 20:11:09 crc kubenswrapper[4888]: I1201 20:11:09.898776 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" event={"ID":"a5293702-c5a9-442d-b776-bed869af0d5d","Type":"ContainerStarted","Data":"7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86"} Dec 01 20:11:10 crc kubenswrapper[4888]: I1201 20:11:10.908069 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" event={"ID":"a5293702-c5a9-442d-b776-bed869af0d5d","Type":"ContainerStarted","Data":"6353578f2b2b6b04e2dbd33918572a772e53fc024279e88ead14c97f90181e82"} Dec 01 20:11:19 crc kubenswrapper[4888]: I1201 20:11:19.451099 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:11:19 crc kubenswrapper[4888]: E1201 20:11:19.453008 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:11:34 crc kubenswrapper[4888]: I1201 20:11:34.452530 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:11:34 crc kubenswrapper[4888]: E1201 20:11:34.453415 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:11:46 crc kubenswrapper[4888]: I1201 20:11:46.452994 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:11:46 crc kubenswrapper[4888]: E1201 20:11:46.454434 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:11:58 crc kubenswrapper[4888]: I1201 20:11:58.451746 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:11:58 crc kubenswrapper[4888]: E1201 20:11:58.452420 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:12:10 crc kubenswrapper[4888]: I1201 20:12:10.456337 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:12:10 crc kubenswrapper[4888]: E1201 20:12:10.458101 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:12:24 crc kubenswrapper[4888]: I1201 20:12:24.452114 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:12:24 crc kubenswrapper[4888]: E1201 20:12:24.453548 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:12:37 crc kubenswrapper[4888]: I1201 20:12:37.451299 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:12:37 crc kubenswrapper[4888]: E1201 20:12:37.452032 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:12:51 crc kubenswrapper[4888]: I1201 20:12:51.451769 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:12:51 crc kubenswrapper[4888]: E1201 20:12:51.452567 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:13:04 crc kubenswrapper[4888]: I1201 20:13:04.451288 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:13:04 crc kubenswrapper[4888]: E1201 20:13:04.452040 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:13:06 crc kubenswrapper[4888]: I1201 20:13:06.616637 4888 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-58df6cb45-qjhmp" podUID="a4b29995-f291-4e12-bfb1-fad0318b0416" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 01 20:13:19 crc kubenswrapper[4888]: I1201 20:13:19.451266 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:13:19 crc kubenswrapper[4888]: E1201 20:13:19.451952 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:13:33 crc kubenswrapper[4888]: I1201 20:13:33.451376 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:13:33 crc kubenswrapper[4888]: E1201 20:13:33.452352 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:13:47 crc kubenswrapper[4888]: I1201 20:13:47.452140 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:13:47 crc kubenswrapper[4888]: E1201 20:13:47.452950 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:13:58 crc kubenswrapper[4888]: I1201 20:13:58.451920 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:13:58 crc kubenswrapper[4888]: E1201 20:13:58.452748 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:14:05 crc kubenswrapper[4888]: I1201 20:14:05.431328 4888 scope.go:117] "RemoveContainer" containerID="a5aaf46ce8fad6ef44f88c5b180c98ead8e81b8c588a1e6bcea9848c02daaa20" Dec 01 20:14:05 crc kubenswrapper[4888]: I1201 20:14:05.457419 4888 scope.go:117] "RemoveContainer" containerID="75dda179c36b1bb952fee828083fa063bef1cbdd9d232b4929525645bea4afc1" Dec 01 20:14:05 crc kubenswrapper[4888]: I1201 20:14:05.486647 4888 scope.go:117] "RemoveContainer" containerID="e1338cedb8fcd728f481737e3400976a18dbc769f77698a8a1819f2e6b9f33a7" Dec 01 20:14:09 crc kubenswrapper[4888]: I1201 20:14:09.453142 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:14:09 crc kubenswrapper[4888]: E1201 20:14:09.454732 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:14:22 crc kubenswrapper[4888]: I1201 20:14:22.451456 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:14:22 crc kubenswrapper[4888]: E1201 20:14:22.452160 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:14:35 crc kubenswrapper[4888]: I1201 20:14:35.451579 4888 
scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:14:35 crc kubenswrapper[4888]: E1201 20:14:35.452201 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:14:47 crc kubenswrapper[4888]: I1201 20:14:47.451824 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:14:47 crc kubenswrapper[4888]: E1201 20:14:47.452556 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.154621 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" podStartSLOduration=232.334911052 podStartE2EDuration="3m53.154588965s" podCreationTimestamp="2025-12-01 20:11:07 +0000 UTC" firstStartedPulling="2025-12-01 20:11:08.898499083 +0000 UTC m=+2268.769528997" lastFinishedPulling="2025-12-01 20:11:09.718176996 +0000 UTC m=+2269.589206910" observedRunningTime="2025-12-01 20:11:10.926157155 +0000 UTC m=+2270.797187089" watchObservedRunningTime="2025-12-01 20:15:00.154588965 +0000 UTC m=+2500.025618889" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.159002 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5"] Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.160646 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.168581 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.168588 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.184627 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5"] Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.339107 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2vbn\" (UniqueName: \"kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.339369 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.339475 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.440838 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2vbn\" (UniqueName: \"kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.440966 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.440997 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.443651 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume\") pod 
\"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.463405 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.466433 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2vbn\" (UniqueName: \"kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn\") pod \"collect-profiles-29410335-44vw5\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.495628 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:00 crc kubenswrapper[4888]: I1201 20:15:00.942890 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5"] Dec 01 20:15:01 crc kubenswrapper[4888]: I1201 20:15:01.129645 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" event={"ID":"00e8e46d-7709-42ce-ba84-f6d870d955a9","Type":"ContainerStarted","Data":"92a191eba4b5887eb6c1ca049b83de6084c8488bdcc3c2adfa1c1652baf4504c"} Dec 01 20:15:01 crc kubenswrapper[4888]: I1201 20:15:01.129715 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" event={"ID":"00e8e46d-7709-42ce-ba84-f6d870d955a9","Type":"ContainerStarted","Data":"1c3e39a9d3352a48bc8e8aa77f3cff586951646a1d7d4309c12ba0cac2aa1632"} Dec 01 20:15:01 crc kubenswrapper[4888]: I1201 20:15:01.150071 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" podStartSLOduration=1.150047011 podStartE2EDuration="1.150047011s" podCreationTimestamp="2025-12-01 20:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:15:01.146844739 +0000 UTC m=+2501.017874673" watchObservedRunningTime="2025-12-01 20:15:01.150047011 +0000 UTC m=+2501.021076925" Dec 01 20:15:01 crc kubenswrapper[4888]: I1201 20:15:01.451573 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:15:02 crc kubenswrapper[4888]: I1201 20:15:02.151209 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3"} Dec 01 20:15:02 crc kubenswrapper[4888]: I1201 20:15:02.160074 4888 generic.go:334] "Generic (PLEG): container finished" podID="00e8e46d-7709-42ce-ba84-f6d870d955a9" containerID="92a191eba4b5887eb6c1ca049b83de6084c8488bdcc3c2adfa1c1652baf4504c" exitCode=0 Dec 01 20:15:02 crc kubenswrapper[4888]: I1201 20:15:02.160117 4888 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" event={"ID":"00e8e46d-7709-42ce-ba84-f6d870d955a9","Type":"ContainerDied","Data":"92a191eba4b5887eb6c1ca049b83de6084c8488bdcc3c2adfa1c1652baf4504c"} Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.490152 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.617717 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume\") pod \"00e8e46d-7709-42ce-ba84-f6d870d955a9\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.617792 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume\") pod \"00e8e46d-7709-42ce-ba84-f6d870d955a9\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.618033 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2vbn\" (UniqueName: \"kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn\") pod \"00e8e46d-7709-42ce-ba84-f6d870d955a9\" (UID: \"00e8e46d-7709-42ce-ba84-f6d870d955a9\") " Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.618488 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume" (OuterVolumeSpecName: "config-volume") pod "00e8e46d-7709-42ce-ba84-f6d870d955a9" (UID: "00e8e46d-7709-42ce-ba84-f6d870d955a9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.618769 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00e8e46d-7709-42ce-ba84-f6d870d955a9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.623687 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "00e8e46d-7709-42ce-ba84-f6d870d955a9" (UID: "00e8e46d-7709-42ce-ba84-f6d870d955a9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.631886 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn" (OuterVolumeSpecName: "kube-api-access-d2vbn") pod "00e8e46d-7709-42ce-ba84-f6d870d955a9" (UID: "00e8e46d-7709-42ce-ba84-f6d870d955a9"). InnerVolumeSpecName "kube-api-access-d2vbn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.720590 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2vbn\" (UniqueName: \"kubernetes.io/projected/00e8e46d-7709-42ce-ba84-f6d870d955a9-kube-api-access-d2vbn\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:03 crc kubenswrapper[4888]: I1201 20:15:03.720944 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00e8e46d-7709-42ce-ba84-f6d870d955a9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.190810 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" event={"ID":"00e8e46d-7709-42ce-ba84-f6d870d955a9","Type":"ContainerDied","Data":"1c3e39a9d3352a48bc8e8aa77f3cff586951646a1d7d4309c12ba0cac2aa1632"} Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.190862 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c3e39a9d3352a48bc8e8aa77f3cff586951646a1d7d4309c12ba0cac2aa1632" Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.190884 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410335-44vw5" Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.222387 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"] Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.229981 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410290-4flfm"] Dec 01 20:15:04 crc kubenswrapper[4888]: I1201 20:15:04.467157 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ba0afc8-40bb-446f-be9d-4532fe287240" path="/var/lib/kubelet/pods/3ba0afc8-40bb-446f-be9d-4532fe287240/volumes" Dec 01 20:15:05 crc kubenswrapper[4888]: I1201 20:15:05.559845 4888 scope.go:117] "RemoveContainer" containerID="b72a7e33253df98ce95b670c1b37184d43bc60dbb813b1343ec5d0b25fbb0257" Dec 01 20:15:39 crc kubenswrapper[4888]: I1201 20:15:39.574047 4888 generic.go:334] "Generic (PLEG): container finished" podID="a5293702-c5a9-442d-b776-bed869af0d5d" containerID="6353578f2b2b6b04e2dbd33918572a772e53fc024279e88ead14c97f90181e82" exitCode=0 Dec 01 20:15:39 crc kubenswrapper[4888]: I1201 20:15:39.574165 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" event={"ID":"a5293702-c5a9-442d-b776-bed869af0d5d","Type":"ContainerDied","Data":"6353578f2b2b6b04e2dbd33918572a772e53fc024279e88ead14c97f90181e82"} Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.112344 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.208091 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle\") pod \"a5293702-c5a9-442d-b776-bed869af0d5d\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.208219 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sxjd\" (UniqueName: \"kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd\") pod \"a5293702-c5a9-442d-b776-bed869af0d5d\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.208263 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory\") pod \"a5293702-c5a9-442d-b776-bed869af0d5d\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.208307 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0\") pod \"a5293702-c5a9-442d-b776-bed869af0d5d\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.208355 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key\") pod \"a5293702-c5a9-442d-b776-bed869af0d5d\" (UID: \"a5293702-c5a9-442d-b776-bed869af0d5d\") " Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.217554 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a5293702-c5a9-442d-b776-bed869af0d5d" (UID: "a5293702-c5a9-442d-b776-bed869af0d5d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.217865 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd" (OuterVolumeSpecName: "kube-api-access-4sxjd") pod "a5293702-c5a9-442d-b776-bed869af0d5d" (UID: "a5293702-c5a9-442d-b776-bed869af0d5d"). InnerVolumeSpecName "kube-api-access-4sxjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.251032 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "a5293702-c5a9-442d-b776-bed869af0d5d" (UID: "a5293702-c5a9-442d-b776-bed869af0d5d"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.255593 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory" (OuterVolumeSpecName: "inventory") pod "a5293702-c5a9-442d-b776-bed869af0d5d" (UID: "a5293702-c5a9-442d-b776-bed869af0d5d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.309955 4888 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.309989 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sxjd\" (UniqueName: \"kubernetes.io/projected/a5293702-c5a9-442d-b776-bed869af0d5d-kube-api-access-4sxjd\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.310000 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.310009 4888 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.327718 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a5293702-c5a9-442d-b776-bed869af0d5d" (UID: "a5293702-c5a9-442d-b776-bed869af0d5d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.411574 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a5293702-c5a9-442d-b776-bed869af0d5d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.593894 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" event={"ID":"a5293702-c5a9-442d-b776-bed869af0d5d","Type":"ContainerDied","Data":"7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86"} Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.594226 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b6619825d4b24c50203b858907661fd198305e5757b7d7ae2813841d1d56b86" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.594072 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.692918 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd"] Dec 01 20:15:41 crc kubenswrapper[4888]: E1201 20:15:41.699601 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00e8e46d-7709-42ce-ba84-f6d870d955a9" containerName="collect-profiles" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.699641 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="00e8e46d-7709-42ce-ba84-f6d870d955a9" containerName="collect-profiles" Dec 01 20:15:41 crc kubenswrapper[4888]: E1201 20:15:41.699682 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5293702-c5a9-442d-b776-bed869af0d5d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.699690 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5293702-c5a9-442d-b776-bed869af0d5d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.699928 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="00e8e46d-7709-42ce-ba84-f6d870d955a9" containerName="collect-profiles" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.699955 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5293702-c5a9-442d-b776-bed869af0d5d" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.700592 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.703321 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704029 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704419 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704512 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704512 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704520 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.704713 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.710316 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd"] Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.819357 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: 
\"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.819443 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.820683 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7f46\" (UniqueName: \"kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.820891 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.820998 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.821073 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.821204 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.821360 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.821407 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.923009 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.923075 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7f46\" (UniqueName: \"kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.923244 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.923293 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.923347 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.924302 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.924407 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.924474 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.924516 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.925688 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.927975 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.928458 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.928475 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.928941 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.929106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.929336 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.929597 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:41 crc kubenswrapper[4888]: I1201 20:15:41.947293 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7f46\" (UniqueName: \"kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46\") pod \"nova-edpm-deployment-openstack-edpm-ipam-m7dqd\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:42 crc kubenswrapper[4888]: I1201 20:15:42.023435 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:15:42 crc kubenswrapper[4888]: I1201 20:15:42.551545 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd"] Dec 01 20:15:42 crc kubenswrapper[4888]: I1201 20:15:42.562313 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:15:42 crc kubenswrapper[4888]: I1201 20:15:42.603497 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" event={"ID":"51a678f1-7309-4200-bf0f-8329f67d2a5c","Type":"ContainerStarted","Data":"b622520037d8d7542c5ff5cb04a351c468e7237304d8d893dfdaf72b75c7c042"} Dec 01 20:15:43 crc kubenswrapper[4888]: I1201 20:15:43.613731 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" event={"ID":"51a678f1-7309-4200-bf0f-8329f67d2a5c","Type":"ContainerStarted","Data":"697cdacf0cb99630243e3d5f12b8dbef4653dee82cf3b7a5653b0aefa14dafd4"} Dec 01 20:15:43 crc kubenswrapper[4888]: I1201 20:15:43.632140 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" podStartSLOduration=2.103889579 podStartE2EDuration="2.632118169s" podCreationTimestamp="2025-12-01 20:15:41 +0000 UTC" firstStartedPulling="2025-12-01 20:15:42.562012241 +0000 UTC m=+2542.433042155" lastFinishedPulling="2025-12-01 20:15:43.090240831 +0000 UTC m=+2542.961270745" observedRunningTime="2025-12-01 20:15:43.630594486 +0000 UTC m=+2543.501624450" watchObservedRunningTime="2025-12-01 20:15:43.632118169 +0000 UTC m=+2543.503148083" Dec 01 20:17:20 crc kubenswrapper[4888]: I1201 20:17:20.038599 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:17:20 crc kubenswrapper[4888]: I1201 20:17:20.039920 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 
20:17:35.126728 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.129327 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.144637 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.150969 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwqtv\" (UniqueName: \"kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.151580 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.151821 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.254110 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwqtv\" (UniqueName: \"kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.254359 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.254444 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.255363 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.255432 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.275238 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwqtv\" (UniqueName: \"kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv\") pod \"community-operators-7hn4h\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:35 crc kubenswrapper[4888]: I1201 20:17:35.457321 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:36 crc kubenswrapper[4888]: I1201 20:17:36.010556 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:36 crc kubenswrapper[4888]: I1201 20:17:36.682759 4888 generic.go:334] "Generic (PLEG): container finished" podID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerID="2b112d674a20af4afad2154cfc34350bc51b9429c3469c59bdb3046b790c4b1d" exitCode=0 Dec 01 20:17:36 crc kubenswrapper[4888]: I1201 20:17:36.682831 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerDied","Data":"2b112d674a20af4afad2154cfc34350bc51b9429c3469c59bdb3046b790c4b1d"} Dec 01 20:17:36 crc kubenswrapper[4888]: I1201 20:17:36.683007 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerStarted","Data":"7d030146c282fd3965a933d01464f05d14dd0331c8ca562a54ac0e214d42e16f"} Dec 01 20:17:37 crc kubenswrapper[4888]: I1201 20:17:37.699674 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerStarted","Data":"2103e17735fe218471a2a1b300d499bdfa6b4f23d0c027801a4aa3ef02cfc9b3"} Dec 01 20:17:38 crc kubenswrapper[4888]: I1201 20:17:38.710526 4888 generic.go:334] "Generic (PLEG): container finished" podID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerID="2103e17735fe218471a2a1b300d499bdfa6b4f23d0c027801a4aa3ef02cfc9b3" exitCode=0 Dec 01 20:17:38 crc kubenswrapper[4888]: I1201 20:17:38.710585 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerDied","Data":"2103e17735fe218471a2a1b300d499bdfa6b4f23d0c027801a4aa3ef02cfc9b3"} Dec 01 20:17:39 crc kubenswrapper[4888]: I1201 20:17:39.722958 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerStarted","Data":"32d4c5ec429c07788e7d331548178e63ffc0a5d7624dd4f54dc124a3400d006e"} Dec 01 20:17:39 crc kubenswrapper[4888]: I1201 20:17:39.746918 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7hn4h" podStartSLOduration=2.182581857 podStartE2EDuration="4.746901102s" podCreationTimestamp="2025-12-01 20:17:35 +0000 UTC" firstStartedPulling="2025-12-01 20:17:36.687814688 +0000 UTC m=+2656.558844602" lastFinishedPulling="2025-12-01 20:17:39.252133933 
+0000 UTC m=+2659.123163847" observedRunningTime="2025-12-01 20:17:39.740728105 +0000 UTC m=+2659.611758029" watchObservedRunningTime="2025-12-01 20:17:39.746901102 +0000 UTC m=+2659.617931016" Dec 01 20:17:45 crc kubenswrapper[4888]: I1201 20:17:45.458026 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:45 crc kubenswrapper[4888]: I1201 20:17:45.458956 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:45 crc kubenswrapper[4888]: I1201 20:17:45.520642 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:45 crc kubenswrapper[4888]: I1201 20:17:45.841449 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:45 crc kubenswrapper[4888]: I1201 20:17:45.901527 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:47 crc kubenswrapper[4888]: I1201 20:17:47.796092 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7hn4h" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="registry-server" containerID="cri-o://32d4c5ec429c07788e7d331548178e63ffc0a5d7624dd4f54dc124a3400d006e" gracePeriod=2 Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.809763 4888 generic.go:334] "Generic (PLEG): container finished" podID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerID="32d4c5ec429c07788e7d331548178e63ffc0a5d7624dd4f54dc124a3400d006e" exitCode=0 Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.809842 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerDied","Data":"32d4c5ec429c07788e7d331548178e63ffc0a5d7624dd4f54dc124a3400d006e"} Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.810132 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7hn4h" event={"ID":"4e5a2b77-aecd-4839-ae30-318daadc2336","Type":"ContainerDied","Data":"7d030146c282fd3965a933d01464f05d14dd0331c8ca562a54ac0e214d42e16f"} Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.810150 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d030146c282fd3965a933d01464f05d14dd0331c8ca562a54ac0e214d42e16f" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.830000 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.857526 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content\") pod \"4e5a2b77-aecd-4839-ae30-318daadc2336\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.857603 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwqtv\" (UniqueName: \"kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv\") pod \"4e5a2b77-aecd-4839-ae30-318daadc2336\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.857691 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities\") pod \"4e5a2b77-aecd-4839-ae30-318daadc2336\" (UID: \"4e5a2b77-aecd-4839-ae30-318daadc2336\") " Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.858940 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities" (OuterVolumeSpecName: "utilities") pod "4e5a2b77-aecd-4839-ae30-318daadc2336" (UID: "4e5a2b77-aecd-4839-ae30-318daadc2336"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.867536 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv" (OuterVolumeSpecName: "kube-api-access-zwqtv") pod "4e5a2b77-aecd-4839-ae30-318daadc2336" (UID: "4e5a2b77-aecd-4839-ae30-318daadc2336"). InnerVolumeSpecName "kube-api-access-zwqtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.920530 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e5a2b77-aecd-4839-ae30-318daadc2336" (UID: "4e5a2b77-aecd-4839-ae30-318daadc2336"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.959844 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.959886 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwqtv\" (UniqueName: \"kubernetes.io/projected/4e5a2b77-aecd-4839-ae30-318daadc2336-kube-api-access-zwqtv\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:48 crc kubenswrapper[4888]: I1201 20:17:48.959898 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e5a2b77-aecd-4839-ae30-318daadc2336-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:17:49 crc kubenswrapper[4888]: I1201 20:17:49.823286 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7hn4h" Dec 01 20:17:49 crc kubenswrapper[4888]: I1201 20:17:49.879820 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:49 crc kubenswrapper[4888]: I1201 20:17:49.887710 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7hn4h"] Dec 01 20:17:50 crc kubenswrapper[4888]: I1201 20:17:50.037435 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:17:50 crc kubenswrapper[4888]: I1201 20:17:50.037500 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:17:50 crc kubenswrapper[4888]: I1201 20:17:50.464270 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" path="/var/lib/kubelet/pods/4e5a2b77-aecd-4839-ae30-318daadc2336/volumes" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.301667 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:02 crc kubenswrapper[4888]: E1201 20:18:02.303329 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="extract-utilities" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.303351 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="extract-utilities" Dec 01 20:18:02 crc kubenswrapper[4888]: E1201 20:18:02.303387 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="registry-server" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.303397 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="registry-server" Dec 01 20:18:02 crc kubenswrapper[4888]: E1201 20:18:02.303409 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="extract-content" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.303416 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="extract-content" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.303649 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e5a2b77-aecd-4839-ae30-318daadc2336" containerName="registry-server" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.305441 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.340440 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.441266 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.441316 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.441408 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whgx2\" (UniqueName: \"kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.543604 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whgx2\" (UniqueName: \"kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.543820 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.543843 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.544796 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.544879 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.583918 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-whgx2\" (UniqueName: \"kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2\") pod \"redhat-operators-w5zg5\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:02 crc kubenswrapper[4888]: I1201 20:18:02.637558 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:03 crc kubenswrapper[4888]: I1201 20:18:03.120372 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:03 crc kubenswrapper[4888]: I1201 20:18:03.950625 4888 generic.go:334] "Generic (PLEG): container finished" podID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerID="ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3" exitCode=0 Dec 01 20:18:03 crc kubenswrapper[4888]: I1201 20:18:03.950726 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerDied","Data":"ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3"} Dec 01 20:18:03 crc kubenswrapper[4888]: I1201 20:18:03.950930 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerStarted","Data":"784edc345420cca88d0b4f4fa58c880db85fe9fd0b72e9cbaf42d109ec6ea835"} Dec 01 20:18:05 crc kubenswrapper[4888]: I1201 20:18:05.975028 4888 generic.go:334] "Generic (PLEG): container finished" podID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerID="a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f" exitCode=0 Dec 01 20:18:05 crc kubenswrapper[4888]: I1201 20:18:05.975155 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerDied","Data":"a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f"} Dec 01 20:18:07 crc kubenswrapper[4888]: I1201 20:18:07.997976 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerStarted","Data":"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806"} Dec 01 20:18:08 crc kubenswrapper[4888]: I1201 20:18:08.016338 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w5zg5" podStartSLOduration=3.096843384 podStartE2EDuration="6.016320531s" podCreationTimestamp="2025-12-01 20:18:02 +0000 UTC" firstStartedPulling="2025-12-01 20:18:03.954105322 +0000 UTC m=+2683.825135226" lastFinishedPulling="2025-12-01 20:18:06.873582449 +0000 UTC m=+2686.744612373" observedRunningTime="2025-12-01 20:18:08.012907123 +0000 UTC m=+2687.883937027" watchObservedRunningTime="2025-12-01 20:18:08.016320531 +0000 UTC m=+2687.887350445" Dec 01 20:18:12 crc kubenswrapper[4888]: I1201 20:18:12.637917 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:12 crc kubenswrapper[4888]: I1201 20:18:12.639035 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:12 crc kubenswrapper[4888]: I1201 20:18:12.682481 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:13 crc kubenswrapper[4888]: I1201 20:18:13.108255 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:13 crc kubenswrapper[4888]: I1201 20:18:13.164289 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.077117 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w5zg5" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="registry-server" containerID="cri-o://ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806" gracePeriod=2 Dec 01 20:18:15 crc kubenswrapper[4888]: E1201 20:18:15.400413 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c81f903_b1a2_4a71_91e0_c5a65c08baad.slice/crio-ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.594167 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.705750 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whgx2\" (UniqueName: \"kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2\") pod \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.706329 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities\") pod \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.706375 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content\") pod \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\" (UID: \"4c81f903-b1a2-4a71-91e0-c5a65c08baad\") " Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.706992 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities" (OuterVolumeSpecName: "utilities") pod "4c81f903-b1a2-4a71-91e0-c5a65c08baad" (UID: "4c81f903-b1a2-4a71-91e0-c5a65c08baad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.717321 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2" (OuterVolumeSpecName: "kube-api-access-whgx2") pod "4c81f903-b1a2-4a71-91e0-c5a65c08baad" (UID: "4c81f903-b1a2-4a71-91e0-c5a65c08baad"). InnerVolumeSpecName "kube-api-access-whgx2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.808667 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.808703 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whgx2\" (UniqueName: \"kubernetes.io/projected/4c81f903-b1a2-4a71-91e0-c5a65c08baad-kube-api-access-whgx2\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.818985 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c81f903-b1a2-4a71-91e0-c5a65c08baad" (UID: "4c81f903-b1a2-4a71-91e0-c5a65c08baad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:18:15 crc kubenswrapper[4888]: I1201 20:18:15.909995 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c81f903-b1a2-4a71-91e0-c5a65c08baad-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.090598 4888 generic.go:334] "Generic (PLEG): container finished" podID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerID="ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806" exitCode=0 Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.090661 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerDied","Data":"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806"} Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.090689 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w5zg5" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.090725 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w5zg5" event={"ID":"4c81f903-b1a2-4a71-91e0-c5a65c08baad","Type":"ContainerDied","Data":"784edc345420cca88d0b4f4fa58c880db85fe9fd0b72e9cbaf42d109ec6ea835"} Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.090757 4888 scope.go:117] "RemoveContainer" containerID="ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.123126 4888 scope.go:117] "RemoveContainer" containerID="a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.142249 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.153040 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w5zg5"] Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.158986 4888 scope.go:117] "RemoveContainer" containerID="ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.213557 4888 scope.go:117] "RemoveContainer" containerID="ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806" Dec 01 20:18:16 crc kubenswrapper[4888]: E1201 20:18:16.214023 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806\": container with ID starting with ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806 not found: ID does not exist" containerID="ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.214068 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806"} err="failed to get container status \"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806\": rpc error: code = NotFound desc = could not find container \"ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806\": container with ID starting with ffa196f75ea70b2852df6e7cff527eaac8f240ebc1cb0e0f47597eb6f7d20806 not found: ID does not exist" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.214096 4888 scope.go:117] "RemoveContainer" containerID="a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f" Dec 01 20:18:16 crc kubenswrapper[4888]: E1201 20:18:16.214376 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f\": container with ID starting with a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f not found: ID does not exist" containerID="a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.214396 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f"} err="failed to get container status \"a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f\": rpc error: code = NotFound desc = could not find container 
\"a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f\": container with ID starting with a358b39412a8e801527b0de513fea299cfca1ddf32b32a6348e71765fa53230f not found: ID does not exist" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.214410 4888 scope.go:117] "RemoveContainer" containerID="ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3" Dec 01 20:18:16 crc kubenswrapper[4888]: E1201 20:18:16.214584 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3\": container with ID starting with ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3 not found: ID does not exist" containerID="ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.214608 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3"} err="failed to get container status \"ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3\": rpc error: code = NotFound desc = could not find container \"ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3\": container with ID starting with ef16f92a7e293c9f7c51630526e1433630ffcf231de5e7e59ff03765e3bc7bc3 not found: ID does not exist" Dec 01 20:18:16 crc kubenswrapper[4888]: I1201 20:18:16.461334 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" path="/var/lib/kubelet/pods/4c81f903-b1a2-4a71-91e0-c5a65c08baad/volumes" Dec 01 20:18:20 crc kubenswrapper[4888]: I1201 20:18:20.037394 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:18:20 crc kubenswrapper[4888]: I1201 20:18:20.037925 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:18:20 crc kubenswrapper[4888]: I1201 20:18:20.037972 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:18:20 crc kubenswrapper[4888]: I1201 20:18:20.038693 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:18:20 crc kubenswrapper[4888]: I1201 20:18:20.038753 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3" gracePeriod=600 Dec 01 20:18:21 crc kubenswrapper[4888]: I1201 20:18:21.146995 4888 generic.go:334] 
"Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3" exitCode=0 Dec 01 20:18:21 crc kubenswrapper[4888]: I1201 20:18:21.147037 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3"} Dec 01 20:18:21 crc kubenswrapper[4888]: I1201 20:18:21.147653 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"} Dec 01 20:18:21 crc kubenswrapper[4888]: I1201 20:18:21.147694 4888 scope.go:117] "RemoveContainer" containerID="78071fcd5026e2d5ce531cbc1412537bfcbd2494bc432f51291561c659afa8b3" Dec 01 20:18:30 crc kubenswrapper[4888]: I1201 20:18:30.231382 4888 generic.go:334] "Generic (PLEG): container finished" podID="51a678f1-7309-4200-bf0f-8329f67d2a5c" containerID="697cdacf0cb99630243e3d5f12b8dbef4653dee82cf3b7a5653b0aefa14dafd4" exitCode=0 Dec 01 20:18:30 crc kubenswrapper[4888]: I1201 20:18:30.231469 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" event={"ID":"51a678f1-7309-4200-bf0f-8329f67d2a5c","Type":"ContainerDied","Data":"697cdacf0cb99630243e3d5f12b8dbef4653dee82cf3b7a5653b0aefa14dafd4"} Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.687521 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841340 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841416 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841475 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7f46\" (UniqueName: \"kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841508 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841536 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle\") pod 
\"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841663 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841679 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841719 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.841756 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1\") pod \"51a678f1-7309-4200-bf0f-8329f67d2a5c\" (UID: \"51a678f1-7309-4200-bf0f-8329f67d2a5c\") " Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.847875 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.853450 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46" (OuterVolumeSpecName: "kube-api-access-n7f46") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "kube-api-access-n7f46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.878094 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory" (OuterVolumeSpecName: "inventory") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.878199 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.881432 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.881780 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.882012 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.887688 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.893372 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "51a678f1-7309-4200-bf0f-8329f67d2a5c" (UID: "51a678f1-7309-4200-bf0f-8329f67d2a5c"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944564 4888 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944625 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7f46\" (UniqueName: \"kubernetes.io/projected/51a678f1-7309-4200-bf0f-8329f67d2a5c-kube-api-access-n7f46\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944661 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944672 4888 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944681 4888 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944690 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944699 4888 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944707 4888 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:31 crc kubenswrapper[4888]: I1201 20:18:31.944715 4888 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/51a678f1-7309-4200-bf0f-8329f67d2a5c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.252976 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" event={"ID":"51a678f1-7309-4200-bf0f-8329f67d2a5c","Type":"ContainerDied","Data":"b622520037d8d7542c5ff5cb04a351c468e7237304d8d893dfdaf72b75c7c042"} Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.253021 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b622520037d8d7542c5ff5cb04a351c468e7237304d8d893dfdaf72b75c7c042" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.253064 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-m7dqd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.343294 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd"] Dec 01 20:18:32 crc kubenswrapper[4888]: E1201 20:18:32.343850 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="registry-server" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.343874 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="registry-server" Dec 01 20:18:32 crc kubenswrapper[4888]: E1201 20:18:32.343915 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="extract-utilities" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.343923 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="extract-utilities" Dec 01 20:18:32 crc kubenswrapper[4888]: E1201 20:18:32.343936 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="extract-content" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.343941 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="extract-content" Dec 01 20:18:32 crc kubenswrapper[4888]: E1201 20:18:32.343957 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51a678f1-7309-4200-bf0f-8329f67d2a5c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.343962 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="51a678f1-7309-4200-bf0f-8329f67d2a5c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.344178 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="51a678f1-7309-4200-bf0f-8329f67d2a5c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.344219 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c81f903-b1a2-4a71-91e0-c5a65c08baad" containerName="registry-server" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.344975 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.350046 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.350105 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.350930 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-w9kps" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.351002 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.353270 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.359588 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd"] Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.457829 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.457888 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.458075 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.458368 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm5tb\" (UniqueName: \"kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.458468 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 
20:18:32.458667 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.458766 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561066 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561444 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561501 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561541 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm5tb\" (UniqueName: \"kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561610 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561697 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.561737 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.565438 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.566106 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.566976 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.567003 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.576705 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.579759 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.580364 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm5tb\" (UniqueName: \"kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") 
" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:32 crc kubenswrapper[4888]: I1201 20:18:32.661377 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:18:33 crc kubenswrapper[4888]: I1201 20:18:33.174624 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd"] Dec 01 20:18:33 crc kubenswrapper[4888]: I1201 20:18:33.263322 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" event={"ID":"41059254-cd26-40bb-bd15-bd935fd4e7e1","Type":"ContainerStarted","Data":"965f7ee124656e43631013140f627be74223d5621e5edd83f71c138fa40ebbb8"} Dec 01 20:18:34 crc kubenswrapper[4888]: I1201 20:18:34.272227 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" event={"ID":"41059254-cd26-40bb-bd15-bd935fd4e7e1","Type":"ContainerStarted","Data":"3d0493145f5f82aed14e9c2a3ead9cfff4f8e4bd58f4d19dfe734eba41721dcd"} Dec 01 20:18:34 crc kubenswrapper[4888]: I1201 20:18:34.298511 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" podStartSLOduration=1.708724133 podStartE2EDuration="2.298490684s" podCreationTimestamp="2025-12-01 20:18:32 +0000 UTC" firstStartedPulling="2025-12-01 20:18:33.180823708 +0000 UTC m=+2713.051853622" lastFinishedPulling="2025-12-01 20:18:33.770590249 +0000 UTC m=+2713.641620173" observedRunningTime="2025-12-01 20:18:34.285892204 +0000 UTC m=+2714.156922108" watchObservedRunningTime="2025-12-01 20:18:34.298490684 +0000 UTC m=+2714.169520598" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.071859 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.075036 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.091426 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.277408 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsbb7\" (UniqueName: \"kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.277528 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.277570 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.379004 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsbb7\" (UniqueName: \"kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.379116 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.379166 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.379720 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.379748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.409480 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nsbb7\" (UniqueName: \"kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7\") pod \"redhat-marketplace-sxcwt\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:53 crc kubenswrapper[4888]: I1201 20:18:53.695341 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:18:54 crc kubenswrapper[4888]: I1201 20:18:54.187654 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:18:54 crc kubenswrapper[4888]: I1201 20:18:54.465769 4888 generic.go:334] "Generic (PLEG): container finished" podID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerID="46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974" exitCode=0 Dec 01 20:18:54 crc kubenswrapper[4888]: I1201 20:18:54.466054 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerDied","Data":"46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974"} Dec 01 20:18:54 crc kubenswrapper[4888]: I1201 20:18:54.466159 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerStarted","Data":"4f349e01135312e9d2a1e9a0708813e6a398cc6740b916285627cc1053ac697b"} Dec 01 20:18:55 crc kubenswrapper[4888]: I1201 20:18:55.479035 4888 generic.go:334] "Generic (PLEG): container finished" podID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerID="70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072" exitCode=0 Dec 01 20:18:55 crc kubenswrapper[4888]: I1201 20:18:55.479076 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerDied","Data":"70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072"} Dec 01 20:18:56 crc kubenswrapper[4888]: I1201 20:18:56.501105 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerStarted","Data":"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0"} Dec 01 20:18:56 crc kubenswrapper[4888]: I1201 20:18:56.526841 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sxcwt" podStartSLOduration=2.044844862 podStartE2EDuration="3.526824619s" podCreationTimestamp="2025-12-01 20:18:53 +0000 UTC" firstStartedPulling="2025-12-01 20:18:54.46754464 +0000 UTC m=+2734.338574554" lastFinishedPulling="2025-12-01 20:18:55.949524387 +0000 UTC m=+2735.820554311" observedRunningTime="2025-12-01 20:18:56.522859266 +0000 UTC m=+2736.393889180" watchObservedRunningTime="2025-12-01 20:18:56.526824619 +0000 UTC m=+2736.397854533" Dec 01 20:19:03 crc kubenswrapper[4888]: I1201 20:19:03.698346 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:03 crc kubenswrapper[4888]: I1201 20:19:03.698958 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:03 crc kubenswrapper[4888]: I1201 20:19:03.747262 4888 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:04 crc kubenswrapper[4888]: I1201 20:19:04.629360 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:04 crc kubenswrapper[4888]: I1201 20:19:04.672522 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:19:06 crc kubenswrapper[4888]: I1201 20:19:06.599722 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sxcwt" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="registry-server" containerID="cri-o://eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0" gracePeriod=2 Dec 01 20:19:06 crc kubenswrapper[4888]: E1201 20:19:06.706211 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8ebe899_0710_4d92_8999_98b4fdf5eede.slice/crio-eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0.scope\": RecentStats: unable to find data in memory cache]" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.031770 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.148203 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content\") pod \"f8ebe899-0710-4d92-8999-98b4fdf5eede\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.148337 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsbb7\" (UniqueName: \"kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7\") pod \"f8ebe899-0710-4d92-8999-98b4fdf5eede\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.148392 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities\") pod \"f8ebe899-0710-4d92-8999-98b4fdf5eede\" (UID: \"f8ebe899-0710-4d92-8999-98b4fdf5eede\") " Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.149363 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities" (OuterVolumeSpecName: "utilities") pod "f8ebe899-0710-4d92-8999-98b4fdf5eede" (UID: "f8ebe899-0710-4d92-8999-98b4fdf5eede"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.169429 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7" (OuterVolumeSpecName: "kube-api-access-nsbb7") pod "f8ebe899-0710-4d92-8999-98b4fdf5eede" (UID: "f8ebe899-0710-4d92-8999-98b4fdf5eede"). InnerVolumeSpecName "kube-api-access-nsbb7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.182823 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8ebe899-0710-4d92-8999-98b4fdf5eede" (UID: "f8ebe899-0710-4d92-8999-98b4fdf5eede"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.250420 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.250469 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsbb7\" (UniqueName: \"kubernetes.io/projected/f8ebe899-0710-4d92-8999-98b4fdf5eede-kube-api-access-nsbb7\") on node \"crc\" DevicePath \"\"" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.250489 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8ebe899-0710-4d92-8999-98b4fdf5eede-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.609770 4888 generic.go:334] "Generic (PLEG): container finished" podID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerID="eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0" exitCode=0 Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.609817 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerDied","Data":"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0"} Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.609848 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sxcwt" event={"ID":"f8ebe899-0710-4d92-8999-98b4fdf5eede","Type":"ContainerDied","Data":"4f349e01135312e9d2a1e9a0708813e6a398cc6740b916285627cc1053ac697b"} Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.609846 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sxcwt" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.609863 4888 scope.go:117] "RemoveContainer" containerID="eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.648349 4888 scope.go:117] "RemoveContainer" containerID="70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.652709 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.661758 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sxcwt"] Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.675411 4888 scope.go:117] "RemoveContainer" containerID="46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.720068 4888 scope.go:117] "RemoveContainer" containerID="eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0" Dec 01 20:19:07 crc kubenswrapper[4888]: E1201 20:19:07.720459 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0\": container with ID starting with eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0 not found: ID does not exist" containerID="eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.720495 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0"} err="failed to get container status \"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0\": rpc error: code = NotFound desc = could not find container \"eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0\": container with ID starting with eeb64f003d25d1d6c7f67fbb76efe3742b3ebbc95909dd642ae5128fc3fd51c0 not found: ID does not exist" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.720520 4888 scope.go:117] "RemoveContainer" containerID="70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072" Dec 01 20:19:07 crc kubenswrapper[4888]: E1201 20:19:07.720809 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072\": container with ID starting with 70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072 not found: ID does not exist" containerID="70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.720836 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072"} err="failed to get container status \"70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072\": rpc error: code = NotFound desc = could not find container \"70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072\": container with ID starting with 70d7bb32e2e7ed3e198a01a7f6b4235d1d43f5fbcef841f138dc12a73f567072 not found: ID does not exist" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.720851 4888 scope.go:117] "RemoveContainer" 
containerID="46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974" Dec 01 20:19:07 crc kubenswrapper[4888]: E1201 20:19:07.721125 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974\": container with ID starting with 46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974 not found: ID does not exist" containerID="46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974" Dec 01 20:19:07 crc kubenswrapper[4888]: I1201 20:19:07.721149 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974"} err="failed to get container status \"46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974\": rpc error: code = NotFound desc = could not find container \"46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974\": container with ID starting with 46b2836bfff74be6b33314197863e56aa3d3fee40371885e7efa692777304974 not found: ID does not exist" Dec 01 20:19:08 crc kubenswrapper[4888]: I1201 20:19:08.462032 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" path="/var/lib/kubelet/pods/f8ebe899-0710-4d92-8999-98b4fdf5eede/volumes" Dec 01 20:20:06 crc kubenswrapper[4888]: I1201 20:20:06.610163 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/neutron-58df6cb45-qjhmp" podUID="a4b29995-f291-4e12-bfb1-fad0318b0416" containerName="neutron-api" probeResult="failure" output="HTTP probe failed with statuscode: 502" Dec 01 20:20:20 crc kubenswrapper[4888]: I1201 20:20:20.038098 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:20:20 crc kubenswrapper[4888]: I1201 20:20:20.038664 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:20:50 crc kubenswrapper[4888]: I1201 20:20:50.038226 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:20:50 crc kubenswrapper[4888]: I1201 20:20:50.038678 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:20:55 crc kubenswrapper[4888]: I1201 20:20:55.619884 4888 generic.go:334] "Generic (PLEG): container finished" podID="41059254-cd26-40bb-bd15-bd935fd4e7e1" containerID="3d0493145f5f82aed14e9c2a3ead9cfff4f8e4bd58f4d19dfe734eba41721dcd" exitCode=0 Dec 01 20:20:55 crc kubenswrapper[4888]: I1201 20:20:55.619978 4888 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" event={"ID":"41059254-cd26-40bb-bd15-bd935fd4e7e1","Type":"ContainerDied","Data":"3d0493145f5f82aed14e9c2a3ead9cfff4f8e4bd58f4d19dfe734eba41721dcd"} Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.071133 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.267960 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268084 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268170 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268299 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268349 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268395 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.268585 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm5tb\" (UniqueName: \"kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb\") pod \"41059254-cd26-40bb-bd15-bd935fd4e7e1\" (UID: \"41059254-cd26-40bb-bd15-bd935fd4e7e1\") " Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.274068 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb" (OuterVolumeSpecName: "kube-api-access-qm5tb") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "kube-api-access-qm5tb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.274339 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.299548 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.299933 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory" (OuterVolumeSpecName: "inventory") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.302723 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.307073 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.314093 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "41059254-cd26-40bb-bd15-bd935fd4e7e1" (UID: "41059254-cd26-40bb-bd15-bd935fd4e7e1"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370388 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm5tb\" (UniqueName: \"kubernetes.io/projected/41059254-cd26-40bb-bd15-bd935fd4e7e1-kube-api-access-qm5tb\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370425 4888 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370440 4888 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370452 4888 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370461 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370471 4888 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.370482 4888 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/41059254-cd26-40bb-bd15-bd935fd4e7e1-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.640648 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" event={"ID":"41059254-cd26-40bb-bd15-bd935fd4e7e1","Type":"ContainerDied","Data":"965f7ee124656e43631013140f627be74223d5621e5edd83f71c138fa40ebbb8"} Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.640687 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="965f7ee124656e43631013140f627be74223d5621e5edd83f71c138fa40ebbb8" Dec 01 20:20:57 crc kubenswrapper[4888]: I1201 20:20:57.640718 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.037874 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.038251 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.038295 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.038999 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.039052 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" gracePeriod=600 Dec 01 20:21:20 crc kubenswrapper[4888]: E1201 20:21:20.157554 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.863702 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" exitCode=0 Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.863755 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"} Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.863787 4888 scope.go:117] "RemoveContainer" containerID="2e8c314151fbe3c664301983f785580cd7c36318905ca5dabb26c0ff6f0911b3" Dec 01 20:21:20 crc kubenswrapper[4888]: I1201 20:21:20.864569 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:21:20 crc kubenswrapper[4888]: E1201 20:21:20.865007 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:21:35 crc kubenswrapper[4888]: I1201 20:21:35.451623 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:21:35 crc kubenswrapper[4888]: E1201 20:21:35.452432 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.566044 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:21:48 crc kubenswrapper[4888]: E1201 20:21:48.566979 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="registry-server" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.566994 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="registry-server" Dec 01 20:21:48 crc kubenswrapper[4888]: E1201 20:21:48.567012 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41059254-cd26-40bb-bd15-bd935fd4e7e1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567021 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="41059254-cd26-40bb-bd15-bd935fd4e7e1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:21:48 crc kubenswrapper[4888]: E1201 20:21:48.567036 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="extract-utilities" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567043 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="extract-utilities" Dec 01 20:21:48 crc kubenswrapper[4888]: E1201 20:21:48.567072 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="extract-content" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567077 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="extract-content" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567283 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="41059254-cd26-40bb-bd15-bd935fd4e7e1" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567346 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ebe899-0710-4d92-8999-98b4fdf5eede" containerName="registry-server" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.567958 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.570698 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.570951 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mp6w7" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.572034 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.572416 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.580288 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.709655 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.709749 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.709901 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710066 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710232 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710305 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw5g4\" (UniqueName: \"kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710329 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710425 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.710463 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.811971 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812029 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812071 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812127 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812164 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw5g4\" (UniqueName: \"kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812211 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812278 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812314 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812365 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.812967 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.813102 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.814131 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.814149 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.814513 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.819008 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.819173 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " 
pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.822678 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.835412 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw5g4\" (UniqueName: \"kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.845407 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " pod="openstack/tempest-tests-tempest" Dec 01 20:21:48 crc kubenswrapper[4888]: I1201 20:21:48.907603 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:21:49 crc kubenswrapper[4888]: I1201 20:21:49.428091 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 20:21:49 crc kubenswrapper[4888]: I1201 20:21:49.433222 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:21:49 crc kubenswrapper[4888]: I1201 20:21:49.451809 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:21:49 crc kubenswrapper[4888]: E1201 20:21:49.452181 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:21:50 crc kubenswrapper[4888]: I1201 20:21:50.149974 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"512845e9-2357-4129-bfb0-4e636ea554e9","Type":"ContainerStarted","Data":"bab0ec730f6c9a6ddafe6f86f8c35b0d85f385d36ca3d3c5aecbf1acd5dd04ee"} Dec 01 20:22:02 crc kubenswrapper[4888]: I1201 20:22:02.451697 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:22:02 crc kubenswrapper[4888]: E1201 20:22:02.452448 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:22:14 crc kubenswrapper[4888]: I1201 20:22:14.452237 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:22:14 crc kubenswrapper[4888]: E1201 20:22:14.453053 4888 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:22:20 crc kubenswrapper[4888]: E1201 20:22:20.376422 4888 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 01 20:22:20 crc kubenswrapper[4888]: E1201 20:22:20.376946 4888 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xw5g4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectRef
erence{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(512845e9-2357-4129-bfb0-4e636ea554e9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 20:22:20 crc kubenswrapper[4888]: E1201 20:22:20.378627 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="512845e9-2357-4129-bfb0-4e636ea554e9" Dec 01 20:22:20 crc kubenswrapper[4888]: E1201 20:22:20.511816 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="512845e9-2357-4129-bfb0-4e636ea554e9" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.506085 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x85cd"] Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.508817 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.522919 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x85cd"] Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.554937 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.555398 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrr5j\" (UniqueName: \"kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.555663 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.657158 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.657234 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrr5j\" 
(UniqueName: \"kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.657269 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.658310 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.658445 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.696146 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrr5j\" (UniqueName: \"kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j\") pod \"certified-operators-x85cd\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") " pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:23 crc kubenswrapper[4888]: I1201 20:22:23.826727 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:24 crc kubenswrapper[4888]: I1201 20:22:24.429140 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x85cd"] Dec 01 20:22:24 crc kubenswrapper[4888]: I1201 20:22:24.560387 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerStarted","Data":"fc1e989b069ddba3a4214394f122b296730d5852f7f50010e1399a16b4582eb2"} Dec 01 20:22:25 crc kubenswrapper[4888]: I1201 20:22:25.571756 4888 generic.go:334] "Generic (PLEG): container finished" podID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerID="26313f399f1709b04c5e5dd0c73e0226190f83b541d03c04453bf969682b63b4" exitCode=0 Dec 01 20:22:25 crc kubenswrapper[4888]: I1201 20:22:25.571798 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerDied","Data":"26313f399f1709b04c5e5dd0c73e0226190f83b541d03c04453bf969682b63b4"} Dec 01 20:22:26 crc kubenswrapper[4888]: I1201 20:22:26.451805 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:22:26 crc kubenswrapper[4888]: E1201 20:22:26.452195 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:22:26 crc kubenswrapper[4888]: I1201 20:22:26.584204 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerStarted","Data":"ffe70f21c9e6931c21f66168aea08a1ae08925dfdc6a8993c675def59f051918"} Dec 01 20:22:27 crc kubenswrapper[4888]: I1201 20:22:27.599862 4888 generic.go:334] "Generic (PLEG): container finished" podID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerID="ffe70f21c9e6931c21f66168aea08a1ae08925dfdc6a8993c675def59f051918" exitCode=0 Dec 01 20:22:27 crc kubenswrapper[4888]: I1201 20:22:27.599913 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerDied","Data":"ffe70f21c9e6931c21f66168aea08a1ae08925dfdc6a8993c675def59f051918"} Dec 01 20:22:29 crc kubenswrapper[4888]: I1201 20:22:29.623348 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerStarted","Data":"47d0bab54127c07ea86d909b2b3c5051a916aa6c0c039cc4148bd750801a41af"} Dec 01 20:22:29 crc kubenswrapper[4888]: I1201 20:22:29.646488 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x85cd" podStartSLOduration=2.8903177700000002 podStartE2EDuration="6.646463817s" podCreationTimestamp="2025-12-01 20:22:23 +0000 UTC" firstStartedPulling="2025-12-01 20:22:25.574325393 +0000 UTC m=+2945.445355317" lastFinishedPulling="2025-12-01 20:22:29.33047145 +0000 UTC m=+2949.201501364" 
observedRunningTime="2025-12-01 20:22:29.637937863 +0000 UTC m=+2949.508967777" watchObservedRunningTime="2025-12-01 20:22:29.646463817 +0000 UTC m=+2949.517493741" Dec 01 20:22:32 crc kubenswrapper[4888]: I1201 20:22:32.954303 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 20:22:33 crc kubenswrapper[4888]: I1201 20:22:33.830475 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:33 crc kubenswrapper[4888]: I1201 20:22:33.830970 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:33 crc kubenswrapper[4888]: I1201 20:22:33.883811 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:34 crc kubenswrapper[4888]: I1201 20:22:34.678117 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"512845e9-2357-4129-bfb0-4e636ea554e9","Type":"ContainerStarted","Data":"a8a640282116068571f24b32b5ad187630826a8b00e08e7f70d1f0fd7fc34a3d"} Dec 01 20:22:34 crc kubenswrapper[4888]: I1201 20:22:34.702405 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.18350404 podStartE2EDuration="47.702384052s" podCreationTimestamp="2025-12-01 20:21:47 +0000 UTC" firstStartedPulling="2025-12-01 20:21:49.432992681 +0000 UTC m=+2909.304022595" lastFinishedPulling="2025-12-01 20:22:32.951872693 +0000 UTC m=+2952.822902607" observedRunningTime="2025-12-01 20:22:34.699052457 +0000 UTC m=+2954.570082381" watchObservedRunningTime="2025-12-01 20:22:34.702384052 +0000 UTC m=+2954.573413956" Dec 01 20:22:34 crc kubenswrapper[4888]: I1201 20:22:34.736778 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x85cd" Dec 01 20:22:34 crc kubenswrapper[4888]: I1201 20:22:34.782458 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x85cd"] Dec 01 20:22:36 crc kubenswrapper[4888]: I1201 20:22:36.694745 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x85cd" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="registry-server" containerID="cri-o://47d0bab54127c07ea86d909b2b3c5051a916aa6c0c039cc4148bd750801a41af" gracePeriod=2 Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.707740 4888 generic.go:334] "Generic (PLEG): container finished" podID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerID="47d0bab54127c07ea86d909b2b3c5051a916aa6c0c039cc4148bd750801a41af" exitCode=0 Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.707808 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerDied","Data":"47d0bab54127c07ea86d909b2b3c5051a916aa6c0c039cc4148bd750801a41af"} Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.708144 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x85cd" event={"ID":"db613a1b-c46f-4a5f-9cfe-2573da344378","Type":"ContainerDied","Data":"fc1e989b069ddba3a4214394f122b296730d5852f7f50010e1399a16b4582eb2"} Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.708165 4888 
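[Editor's note] The two "Observed pod startup duration" records above are internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same interval minus the image-pull window (lastFinishedPulling minus firstStartedPulling). This reading is inferred from the values in this log, not from the tracker's source; a short check with the certified-operators-x85cd numbers:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        parse := func(s string) time.Time {
            t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
            if err != nil {
                panic(err)
            }
            return t
        }
        created := parse("2025-12-01 20:22:23 +0000 UTC")
        firstPull := parse("2025-12-01 20:22:25.574325393 +0000 UTC")
        lastPull := parse("2025-12-01 20:22:29.33047145 +0000 UTC")
        running := parse("2025-12-01 20:22:29.646463817 +0000 UTC")

        e2e := running.Sub(created)          // 6.646463817s = podStartE2EDuration
        slo := e2e - lastPull.Sub(firstPull) // 2.89031776s  = podStartSLOduration
        fmt.Println(e2e, slo)
    }

The tempest record checks out the same way: 47.702384052s minus its 43.518880012s pull window gives the logged podStartSLOduration=4.18350404.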
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.725567    4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x85cd"
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.851193    4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities\") pod \"db613a1b-c46f-4a5f-9cfe-2573da344378\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") "
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.851247    4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content\") pod \"db613a1b-c46f-4a5f-9cfe-2573da344378\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") "
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.851456    4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrr5j\" (UniqueName: \"kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j\") pod \"db613a1b-c46f-4a5f-9cfe-2573da344378\" (UID: \"db613a1b-c46f-4a5f-9cfe-2573da344378\") "
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.852287    4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities" (OuterVolumeSpecName: "utilities") pod "db613a1b-c46f-4a5f-9cfe-2573da344378" (UID: "db613a1b-c46f-4a5f-9cfe-2573da344378"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.858381    4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j" (OuterVolumeSpecName: "kube-api-access-rrr5j") pod "db613a1b-c46f-4a5f-9cfe-2573da344378" (UID: "db613a1b-c46f-4a5f-9cfe-2573da344378"). InnerVolumeSpecName "kube-api-access-rrr5j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.901863    4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db613a1b-c46f-4a5f-9cfe-2573da344378" (UID: "db613a1b-c46f-4a5f-9cfe-2573da344378"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.954198    4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrr5j\" (UniqueName: \"kubernetes.io/projected/db613a1b-c46f-4a5f-9cfe-2573da344378-kube-api-access-rrr5j\") on node \"crc\" DevicePath \"\""
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.954243    4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 20:22:37 crc kubenswrapper[4888]: I1201 20:22:37.954256    4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db613a1b-c46f-4a5f-9cfe-2573da344378-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 20:22:38 crc kubenswrapper[4888]: I1201 20:22:38.715389    4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x85cd"
Dec 01 20:22:38 crc kubenswrapper[4888]: I1201 20:22:38.755276    4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x85cd"]
Dec 01 20:22:38 crc kubenswrapper[4888]: I1201 20:22:38.765822    4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x85cd"]
Dec 01 20:22:40 crc kubenswrapper[4888]: I1201 20:22:40.471070    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:22:40 crc kubenswrapper[4888]: E1201 20:22:40.474260    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:22:40 crc kubenswrapper[4888]: I1201 20:22:40.491111    4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" path="/var/lib/kubelet/pods/db613a1b-c46f-4a5f-9cfe-2573da344378/volumes"
Dec 01 20:22:54 crc kubenswrapper[4888]: I1201 20:22:54.451455    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:22:54 crc kubenswrapper[4888]: E1201 20:22:54.452306    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:23:07 crc kubenswrapper[4888]: I1201 20:23:07.451357    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:23:07 crc kubenswrapper[4888]: E1201 20:23:07.453325    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:23:22 crc kubenswrapper[4888]: I1201 20:23:22.451440    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:23:22 crc kubenswrapper[4888]: E1201 20:23:22.452158    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:23:37 crc kubenswrapper[4888]: I1201 20:23:37.451904    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:23:37 crc kubenswrapper[4888]: E1201 20:23:37.452689    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:23:49 crc kubenswrapper[4888]: I1201 20:23:49.451803    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:23:49 crc kubenswrapper[4888]: E1201 20:23:49.452810    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:24:02 crc kubenswrapper[4888]: I1201 20:24:02.450769    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:24:02 crc kubenswrapper[4888]: E1201 20:24:02.451487    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:24:05 crc kubenswrapper[4888]: I1201 20:24:05.794875    4888 scope.go:117] "RemoveContainer" containerID="32d4c5ec429c07788e7d331548178e63ffc0a5d7624dd4f54dc124a3400d006e"
Dec 01 20:24:05 crc kubenswrapper[4888]: I1201 20:24:05.816134    4888 scope.go:117] "RemoveContainer" containerID="2103e17735fe218471a2a1b300d499bdfa6b4f23d0c027801a4aa3ef02cfc9b3"
Dec 01 20:24:05 crc kubenswrapper[4888]: I1201 20:24:05.835988    4888 scope.go:117] "RemoveContainer" containerID="2b112d674a20af4afad2154cfc34350bc51b9429c3469c59bdb3046b790c4b1d"
Dec 01 20:24:17 crc kubenswrapper[4888]: I1201 20:24:17.451885    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:24:17 crc kubenswrapper[4888]: E1201 20:24:17.453116    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:24:30 crc kubenswrapper[4888]: I1201 20:24:30.457318    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:24:30 crc kubenswrapper[4888]: E1201 20:24:30.458129    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:24:45 crc kubenswrapper[4888]: I1201 20:24:45.451693    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:24:45 crc kubenswrapper[4888]: E1201 20:24:45.452555    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:24:57 crc kubenswrapper[4888]: I1201 20:24:57.451160    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:24:57 crc kubenswrapper[4888]: E1201 20:24:57.452391    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:25:10 crc kubenswrapper[4888]: I1201 20:25:10.458716    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:25:10 crc kubenswrapper[4888]: E1201 20:25:10.459665    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:25:25 crc kubenswrapper[4888]: I1201 20:25:25.451727    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:25:25 crc kubenswrapper[4888]: E1201 20:25:25.452526    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:25:38 crc kubenswrapper[4888]: I1201 20:25:38.452406    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:25:38 crc kubenswrapper[4888]: E1201 20:25:38.454037    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:25:53 crc kubenswrapper[4888]: I1201 20:25:53.451883    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:25:53 crc kubenswrapper[4888]: E1201 20:25:53.452904    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:26:04 crc kubenswrapper[4888]: I1201 20:26:04.456434    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:26:04 crc kubenswrapper[4888]: E1201 20:26:04.457254    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:26:16 crc kubenswrapper[4888]: I1201 20:26:16.452488    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:26:16 crc kubenswrapper[4888]: E1201 20:26:16.453642    4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:26:27 crc kubenswrapper[4888]: I1201 20:26:27.451653    4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4"
Dec 01 20:26:27 crc kubenswrapper[4888]: I1201 20:26:27.781025    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2"}
event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2"} Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.726573 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zzmhp"] Dec 01 20:28:23 crc kubenswrapper[4888]: E1201 20:28:23.728829 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="extract-utilities" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.728975 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="extract-utilities" Dec 01 20:28:23 crc kubenswrapper[4888]: E1201 20:28:23.729093 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="registry-server" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.729294 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="registry-server" Dec 01 20:28:23 crc kubenswrapper[4888]: E1201 20:28:23.729445 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="extract-content" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.729558 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="extract-content" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.730004 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="db613a1b-c46f-4a5f-9cfe-2573da344378" containerName="registry-server" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.732331 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.740956 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zzmhp"] Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.872965 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zwkd\" (UniqueName: \"kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.873599 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.873756 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.976352 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.976401 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.976446 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zwkd\" (UniqueName: \"kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.976852 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:23 crc kubenswrapper[4888]: I1201 20:28:23.976942 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:24 crc kubenswrapper[4888]: I1201 20:28:23.999857 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2zwkd\" (UniqueName: \"kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd\") pod \"community-operators-zzmhp\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:24 crc kubenswrapper[4888]: I1201 20:28:24.061318 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:24 crc kubenswrapper[4888]: I1201 20:28:24.615965 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zzmhp"] Dec 01 20:28:24 crc kubenswrapper[4888]: W1201 20:28:24.617672 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod27065557_9c10_446a_80ff_de365f6f5b4d.slice/crio-4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966 WatchSource:0}: Error finding container 4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966: Status 404 returned error can't find the container with id 4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966 Dec 01 20:28:24 crc kubenswrapper[4888]: I1201 20:28:24.830070 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerStarted","Data":"de7177e7d0f6dc6bccd9229ad5dc1d30639c11656e376ee5455e5a7e5d7ac87f"} Dec 01 20:28:24 crc kubenswrapper[4888]: I1201 20:28:24.830452 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerStarted","Data":"4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966"} Dec 01 20:28:25 crc kubenswrapper[4888]: I1201 20:28:25.840706 4888 generic.go:334] "Generic (PLEG): container finished" podID="27065557-9c10-446a-80ff-de365f6f5b4d" containerID="de7177e7d0f6dc6bccd9229ad5dc1d30639c11656e376ee5455e5a7e5d7ac87f" exitCode=0 Dec 01 20:28:25 crc kubenswrapper[4888]: I1201 20:28:25.840760 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerDied","Data":"de7177e7d0f6dc6bccd9229ad5dc1d30639c11656e376ee5455e5a7e5d7ac87f"} Dec 01 20:28:25 crc kubenswrapper[4888]: I1201 20:28:25.843765 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:28:27 crc kubenswrapper[4888]: I1201 20:28:27.859680 4888 generic.go:334] "Generic (PLEG): container finished" podID="27065557-9c10-446a-80ff-de365f6f5b4d" containerID="8af216b28f7abf966bf05d957b91b8b4f6ecb1cdd43db25386ed14d8f933d0dd" exitCode=0 Dec 01 20:28:27 crc kubenswrapper[4888]: I1201 20:28:27.859772 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerDied","Data":"8af216b28f7abf966bf05d957b91b8b4f6ecb1cdd43db25386ed14d8f933d0dd"} Dec 01 20:28:28 crc kubenswrapper[4888]: I1201 20:28:28.869838 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerStarted","Data":"5a1acab26ed5f8e4802a026e26674d5683debb6b3e039e7e3c5e4bb676594408"} Dec 01 20:28:28 crc kubenswrapper[4888]: I1201 
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.026217    4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"]
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.033005    4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.041808    4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"]
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.061442    4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zzmhp"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.062066    4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zzmhp"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.116752    4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zzmhp"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.163233    4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgsz5\" (UniqueName: \"kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.163317    4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.163517    4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.265681    4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.265767    4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgsz5\" (UniqueName: \"kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.265817    4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.266223    4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.266289    4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.291485    4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgsz5\" (UniqueName: \"kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5\") pod \"redhat-operators-cb27v\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.365088    4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb27v"
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.830520    4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"]
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.924526    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerStarted","Data":"4465b361348a6dba5681fd4e3c3f18a13b347b7cd758cd875af85a9a182140f9"}
Dec 01 20:28:34 crc kubenswrapper[4888]: I1201 20:28:34.982912    4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zzmhp"
Dec 01 20:28:35 crc kubenswrapper[4888]: I1201 20:28:35.935293    4888 generic.go:334] "Generic (PLEG): container finished" podID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerID="8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee" exitCode=0
Dec 01 20:28:35 crc kubenswrapper[4888]: I1201 20:28:35.935425    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerDied","Data":"8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee"}
Dec 01 20:28:36 crc kubenswrapper[4888]: I1201 20:28:36.381491    4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zzmhp"]
Dec 01 20:28:38 crc kubenswrapper[4888]: I1201 20:28:38.068424    4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zzmhp" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="registry-server" containerID="cri-o://5a1acab26ed5f8e4802a026e26674d5683debb6b3e039e7e3c5e4bb676594408" gracePeriod=2
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.080791    4888 generic.go:334] "Generic (PLEG): container finished" podID="27065557-9c10-446a-80ff-de365f6f5b4d" containerID="5a1acab26ed5f8e4802a026e26674d5683debb6b3e039e7e3c5e4bb676594408" exitCode=0
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.080981    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerDied","Data":"5a1acab26ed5f8e4802a026e26674d5683debb6b3e039e7e3c5e4bb676594408"}
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.081635    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zzmhp" event={"ID":"27065557-9c10-446a-80ff-de365f6f5b4d","Type":"ContainerDied","Data":"4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966"}
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.081661    4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a8592b6c28fc2050bf9094e04bf7b1d1ad3bd02573d31971f85d7fdbbd65966"
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.083761    4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerStarted","Data":"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197"}
Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.149539    4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zzmhp"
Need to start a new one" pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.292414 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zwkd\" (UniqueName: \"kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd\") pod \"27065557-9c10-446a-80ff-de365f6f5b4d\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.292495 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content\") pod \"27065557-9c10-446a-80ff-de365f6f5b4d\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.292643 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities\") pod \"27065557-9c10-446a-80ff-de365f6f5b4d\" (UID: \"27065557-9c10-446a-80ff-de365f6f5b4d\") " Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.293475 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities" (OuterVolumeSpecName: "utilities") pod "27065557-9c10-446a-80ff-de365f6f5b4d" (UID: "27065557-9c10-446a-80ff-de365f6f5b4d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.298382 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd" (OuterVolumeSpecName: "kube-api-access-2zwkd") pod "27065557-9c10-446a-80ff-de365f6f5b4d" (UID: "27065557-9c10-446a-80ff-de365f6f5b4d"). InnerVolumeSpecName "kube-api-access-2zwkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.331757 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27065557-9c10-446a-80ff-de365f6f5b4d" (UID: "27065557-9c10-446a-80ff-de365f6f5b4d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.394499 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.394534 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zwkd\" (UniqueName: \"kubernetes.io/projected/27065557-9c10-446a-80ff-de365f6f5b4d-kube-api-access-2zwkd\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:39 crc kubenswrapper[4888]: I1201 20:28:39.394547 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27065557-9c10-446a-80ff-de365f6f5b4d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:40 crc kubenswrapper[4888]: I1201 20:28:40.090282 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zzmhp" Dec 01 20:28:40 crc kubenswrapper[4888]: I1201 20:28:40.124057 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zzmhp"] Dec 01 20:28:40 crc kubenswrapper[4888]: I1201 20:28:40.132070 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zzmhp"] Dec 01 20:28:40 crc kubenswrapper[4888]: I1201 20:28:40.464145 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" path="/var/lib/kubelet/pods/27065557-9c10-446a-80ff-de365f6f5b4d/volumes" Dec 01 20:28:41 crc kubenswrapper[4888]: I1201 20:28:41.109085 4888 generic.go:334] "Generic (PLEG): container finished" podID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerID="3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197" exitCode=0 Dec 01 20:28:41 crc kubenswrapper[4888]: I1201 20:28:41.109191 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerDied","Data":"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197"} Dec 01 20:28:42 crc kubenswrapper[4888]: I1201 20:28:42.120581 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerStarted","Data":"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af"} Dec 01 20:28:42 crc kubenswrapper[4888]: I1201 20:28:42.142249 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cb27v" podStartSLOduration=3.289565117 podStartE2EDuration="9.142231629s" podCreationTimestamp="2025-12-01 20:28:33 +0000 UTC" firstStartedPulling="2025-12-01 20:28:35.937906771 +0000 UTC m=+3315.808936685" lastFinishedPulling="2025-12-01 20:28:41.790573273 +0000 UTC m=+3321.661603197" observedRunningTime="2025-12-01 20:28:42.137071653 +0000 UTC m=+3322.008101577" watchObservedRunningTime="2025-12-01 20:28:42.142231629 +0000 UTC m=+3322.013261543" Dec 01 20:28:44 crc kubenswrapper[4888]: I1201 20:28:44.365757 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:44 crc kubenswrapper[4888]: I1201 20:28:44.366067 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:45 crc kubenswrapper[4888]: I1201 20:28:45.417595 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cb27v" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="registry-server" probeResult="failure" output=< Dec 01 20:28:45 crc kubenswrapper[4888]: timeout: failed to connect service ":50051" within 1s Dec 01 20:28:45 crc kubenswrapper[4888]: > Dec 01 20:28:50 crc kubenswrapper[4888]: I1201 20:28:50.037415 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:28:50 crc kubenswrapper[4888]: I1201 20:28:50.037991 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" 
podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:28:54 crc kubenswrapper[4888]: I1201 20:28:54.413795 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:54 crc kubenswrapper[4888]: I1201 20:28:54.469632 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:54 crc kubenswrapper[4888]: I1201 20:28:54.920167 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"] Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.246528 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cb27v" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="registry-server" containerID="cri-o://a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af" gracePeriod=2 Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.761203 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.832119 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgsz5\" (UniqueName: \"kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5\") pod \"95b1ac94-cc34-40a3-b842-0c4126073eb6\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.832230 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities\") pod \"95b1ac94-cc34-40a3-b842-0c4126073eb6\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.832509 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content\") pod \"95b1ac94-cc34-40a3-b842-0c4126073eb6\" (UID: \"95b1ac94-cc34-40a3-b842-0c4126073eb6\") " Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.833270 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities" (OuterVolumeSpecName: "utilities") pod "95b1ac94-cc34-40a3-b842-0c4126073eb6" (UID: "95b1ac94-cc34-40a3-b842-0c4126073eb6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.838396 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5" (OuterVolumeSpecName: "kube-api-access-mgsz5") pod "95b1ac94-cc34-40a3-b842-0c4126073eb6" (UID: "95b1ac94-cc34-40a3-b842-0c4126073eb6"). InnerVolumeSpecName "kube-api-access-mgsz5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.934464 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgsz5\" (UniqueName: \"kubernetes.io/projected/95b1ac94-cc34-40a3-b842-0c4126073eb6-kube-api-access-mgsz5\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.934502 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:56 crc kubenswrapper[4888]: I1201 20:28:56.949748 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95b1ac94-cc34-40a3-b842-0c4126073eb6" (UID: "95b1ac94-cc34-40a3-b842-0c4126073eb6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.036139 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95b1ac94-cc34-40a3-b842-0c4126073eb6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.257622 4888 generic.go:334] "Generic (PLEG): container finished" podID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerID="a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af" exitCode=0 Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.257668 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerDied","Data":"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af"} Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.257676 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cb27v" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.257698 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cb27v" event={"ID":"95b1ac94-cc34-40a3-b842-0c4126073eb6","Type":"ContainerDied","Data":"4465b361348a6dba5681fd4e3c3f18a13b347b7cd758cd875af85a9a182140f9"} Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.257717 4888 scope.go:117] "RemoveContainer" containerID="a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.281164 4888 scope.go:117] "RemoveContainer" containerID="3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.296706 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"] Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.305013 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cb27v"] Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.306047 4888 scope.go:117] "RemoveContainer" containerID="8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.364335 4888 scope.go:117] "RemoveContainer" containerID="a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af" Dec 01 20:28:57 crc kubenswrapper[4888]: E1201 20:28:57.364661 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af\": container with ID starting with a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af not found: ID does not exist" containerID="a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.364693 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af"} err="failed to get container status \"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af\": rpc error: code = NotFound desc = could not find container \"a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af\": container with ID starting with a5fa9195b134ad26db03c88027cd8e644a72e02cef4a7df690336b7636d190af not found: ID does not exist" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.364717 4888 scope.go:117] "RemoveContainer" containerID="3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197" Dec 01 20:28:57 crc kubenswrapper[4888]: E1201 20:28:57.364919 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197\": container with ID starting with 3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197 not found: ID does not exist" containerID="3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.364951 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197"} err="failed to get container status \"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197\": rpc error: code = NotFound desc = could not find container 
\"3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197\": container with ID starting with 3f79a969effe1d73b05cfc13177b5f592a9b1a65fec323af09ccab24b94b3197 not found: ID does not exist" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.364966 4888 scope.go:117] "RemoveContainer" containerID="8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee" Dec 01 20:28:57 crc kubenswrapper[4888]: E1201 20:28:57.365204 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee\": container with ID starting with 8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee not found: ID does not exist" containerID="8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee" Dec 01 20:28:57 crc kubenswrapper[4888]: I1201 20:28:57.365237 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee"} err="failed to get container status \"8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee\": rpc error: code = NotFound desc = could not find container \"8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee\": container with ID starting with 8dae39f4aa0eae3bd423c47dcb0b8abbcec53c976ef702b83de8dfb3aaccbbee not found: ID does not exist" Dec 01 20:28:58 crc kubenswrapper[4888]: I1201 20:28:58.468324 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" path="/var/lib/kubelet/pods/95b1ac94-cc34-40a3-b842-0c4126073eb6/volumes" Dec 01 20:29:05 crc kubenswrapper[4888]: I1201 20:29:05.975956 4888 scope.go:117] "RemoveContainer" containerID="ffe70f21c9e6931c21f66168aea08a1ae08925dfdc6a8993c675def59f051918" Dec 01 20:29:06 crc kubenswrapper[4888]: I1201 20:29:06.014491 4888 scope.go:117] "RemoveContainer" containerID="26313f399f1709b04c5e5dd0c73e0226190f83b541d03c04453bf969682b63b4" Dec 01 20:29:06 crc kubenswrapper[4888]: I1201 20:29:06.046792 4888 scope.go:117] "RemoveContainer" containerID="47d0bab54127c07ea86d909b2b3c5051a916aa6c0c039cc4148bd750801a41af" Dec 01 20:29:20 crc kubenswrapper[4888]: I1201 20:29:20.038358 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:29:20 crc kubenswrapper[4888]: I1201 20:29:20.038846 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.037634 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.038633 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.038718 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.040016 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.040135 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2" gracePeriod=600 Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.720164 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2" exitCode=0 Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.720732 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2"} Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.720761 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03"} Dec 01 20:29:50 crc kubenswrapper[4888]: I1201 20:29:50.720777 4888 scope.go:117] "RemoveContainer" containerID="144c7372f16a6f526a25a61ff2fbad8442f24ccf555a595f36668630288212f4" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.147712 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg"] Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148786 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.148804 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148823 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="extract-utilities" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.148830 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="extract-utilities" Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148878 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 
20:30:00.148887 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148896 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="extract-content" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.148903 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="extract-content" Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148926 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="extract-utilities" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.148933 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="extract-utilities" Dec 01 20:30:00 crc kubenswrapper[4888]: E1201 20:30:00.148948 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="extract-content" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.148955 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="extract-content" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.149226 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="27065557-9c10-446a-80ff-de365f6f5b4d" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.149248 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="95b1ac94-cc34-40a3-b842-0c4126073eb6" containerName="registry-server" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.150496 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.152766 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.152992 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.160797 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg"] Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.193905 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.194353 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsv7m\" (UniqueName: \"kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.194410 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.296195 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsv7m\" (UniqueName: \"kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.296267 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.296352 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.297336 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume\") pod 
\"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.303840 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.319952 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsv7m\" (UniqueName: \"kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m\") pod \"collect-profiles-29410350-dpwrg\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.472597 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:00 crc kubenswrapper[4888]: I1201 20:30:00.894412 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg"] Dec 01 20:30:01 crc kubenswrapper[4888]: I1201 20:30:01.820028 4888 generic.go:334] "Generic (PLEG): container finished" podID="eeb85494-45fe-452e-9f9d-7981db98ea40" containerID="45ad00c2aa79dcf8137400003fbd846f1da5ac7a7a172ea888782c271c431ae0" exitCode=0 Dec 01 20:30:01 crc kubenswrapper[4888]: I1201 20:30:01.820127 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" event={"ID":"eeb85494-45fe-452e-9f9d-7981db98ea40","Type":"ContainerDied","Data":"45ad00c2aa79dcf8137400003fbd846f1da5ac7a7a172ea888782c271c431ae0"} Dec 01 20:30:01 crc kubenswrapper[4888]: I1201 20:30:01.820332 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" event={"ID":"eeb85494-45fe-452e-9f9d-7981db98ea40","Type":"ContainerStarted","Data":"b874fe3922fe0174316fe56a4703bf82e4e2ba41faba5f26f7a71ce1d0e90afa"} Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.232802 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.248793 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsv7m\" (UniqueName: \"kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m\") pod \"eeb85494-45fe-452e-9f9d-7981db98ea40\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.248917 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume\") pod \"eeb85494-45fe-452e-9f9d-7981db98ea40\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.248983 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume\") pod \"eeb85494-45fe-452e-9f9d-7981db98ea40\" (UID: \"eeb85494-45fe-452e-9f9d-7981db98ea40\") " Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.249916 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume" (OuterVolumeSpecName: "config-volume") pod "eeb85494-45fe-452e-9f9d-7981db98ea40" (UID: "eeb85494-45fe-452e-9f9d-7981db98ea40"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.255562 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eeb85494-45fe-452e-9f9d-7981db98ea40" (UID: "eeb85494-45fe-452e-9f9d-7981db98ea40"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.255657 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m" (OuterVolumeSpecName: "kube-api-access-tsv7m") pod "eeb85494-45fe-452e-9f9d-7981db98ea40" (UID: "eeb85494-45fe-452e-9f9d-7981db98ea40"). InnerVolumeSpecName "kube-api-access-tsv7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.351106 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsv7m\" (UniqueName: \"kubernetes.io/projected/eeb85494-45fe-452e-9f9d-7981db98ea40-kube-api-access-tsv7m\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.351148 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eeb85494-45fe-452e-9f9d-7981db98ea40-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.351157 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eeb85494-45fe-452e-9f9d-7981db98ea40-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.843625 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" event={"ID":"eeb85494-45fe-452e-9f9d-7981db98ea40","Type":"ContainerDied","Data":"b874fe3922fe0174316fe56a4703bf82e4e2ba41faba5f26f7a71ce1d0e90afa"} Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.844014 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b874fe3922fe0174316fe56a4703bf82e4e2ba41faba5f26f7a71ce1d0e90afa" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.843669 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410350-dpwrg" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.926281 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:03 crc kubenswrapper[4888]: E1201 20:30:03.926795 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeb85494-45fe-452e-9f9d-7981db98ea40" containerName="collect-profiles" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.926817 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeb85494-45fe-452e-9f9d-7981db98ea40" containerName="collect-profiles" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.927104 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeb85494-45fe-452e-9f9d-7981db98ea40" containerName="collect-profiles" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.928920 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:03 crc kubenswrapper[4888]: I1201 20:30:03.950096 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.064450 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5rph\" (UniqueName: \"kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.064501 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.064612 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.167058 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5rph\" (UniqueName: \"kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.167113 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.167250 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.168023 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.168027 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.187596 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d5rph\" (UniqueName: \"kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph\") pod \"redhat-marketplace-4w6nx\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.256605 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.332152 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84"] Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.343321 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410305-tht84"] Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.463047 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8a2afe5-2d97-407d-a226-40c0d3a61690" path="/var/lib/kubelet/pods/d8a2afe5-2d97-407d-a226-40c0d3a61690/volumes" Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.753034 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:04 crc kubenswrapper[4888]: I1201 20:30:04.854605 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerStarted","Data":"f46d90a82114ebe2fabdf62929e7191f236edc17e098eebd1d33f45f42941ae8"} Dec 01 20:30:05 crc kubenswrapper[4888]: I1201 20:30:05.864983 4888 generic.go:334] "Generic (PLEG): container finished" podID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerID="4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762" exitCode=0 Dec 01 20:30:05 crc kubenswrapper[4888]: I1201 20:30:05.865042 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerDied","Data":"4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762"} Dec 01 20:30:06 crc kubenswrapper[4888]: I1201 20:30:06.136758 4888 scope.go:117] "RemoveContainer" containerID="5308917796fe41ce9d4974d52f1ca3a51918251b7ec9f3e894bc2ff57911b1d5" Dec 01 20:30:07 crc kubenswrapper[4888]: I1201 20:30:07.883455 4888 generic.go:334] "Generic (PLEG): container finished" podID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerID="37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab" exitCode=0 Dec 01 20:30:07 crc kubenswrapper[4888]: I1201 20:30:07.883547 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerDied","Data":"37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab"} Dec 01 20:30:09 crc kubenswrapper[4888]: I1201 20:30:09.902299 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerStarted","Data":"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af"} Dec 01 20:30:09 crc kubenswrapper[4888]: I1201 20:30:09.921444 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4w6nx" podStartSLOduration=4.016879056 podStartE2EDuration="6.921424198s" podCreationTimestamp="2025-12-01 
20:30:03 +0000 UTC" firstStartedPulling="2025-12-01 20:30:05.86695337 +0000 UTC m=+3405.737983284" lastFinishedPulling="2025-12-01 20:30:08.771498512 +0000 UTC m=+3408.642528426" observedRunningTime="2025-12-01 20:30:09.9197257 +0000 UTC m=+3409.790755624" watchObservedRunningTime="2025-12-01 20:30:09.921424198 +0000 UTC m=+3409.792454112" Dec 01 20:30:14 crc kubenswrapper[4888]: I1201 20:30:14.258078 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:14 crc kubenswrapper[4888]: I1201 20:30:14.260318 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:14 crc kubenswrapper[4888]: I1201 20:30:14.303096 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:14 crc kubenswrapper[4888]: I1201 20:30:14.999585 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:15 crc kubenswrapper[4888]: I1201 20:30:15.050326 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:16 crc kubenswrapper[4888]: I1201 20:30:16.961719 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4w6nx" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="registry-server" containerID="cri-o://1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af" gracePeriod=2 Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.458749 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.613691 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities\") pod \"718b6f43-572e-47bf-aa5d-1013e6a923ad\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.613749 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5rph\" (UniqueName: \"kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph\") pod \"718b6f43-572e-47bf-aa5d-1013e6a923ad\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.614043 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content\") pod \"718b6f43-572e-47bf-aa5d-1013e6a923ad\" (UID: \"718b6f43-572e-47bf-aa5d-1013e6a923ad\") " Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.615294 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities" (OuterVolumeSpecName: "utilities") pod "718b6f43-572e-47bf-aa5d-1013e6a923ad" (UID: "718b6f43-572e-47bf-aa5d-1013e6a923ad"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.619869 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph" (OuterVolumeSpecName: "kube-api-access-d5rph") pod "718b6f43-572e-47bf-aa5d-1013e6a923ad" (UID: "718b6f43-572e-47bf-aa5d-1013e6a923ad"). InnerVolumeSpecName "kube-api-access-d5rph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.632097 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "718b6f43-572e-47bf-aa5d-1013e6a923ad" (UID: "718b6f43-572e-47bf-aa5d-1013e6a923ad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.716032 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5rph\" (UniqueName: \"kubernetes.io/projected/718b6f43-572e-47bf-aa5d-1013e6a923ad-kube-api-access-d5rph\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.716067 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.716079 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/718b6f43-572e-47bf-aa5d-1013e6a923ad-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.971658 4888 generic.go:334] "Generic (PLEG): container finished" podID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerID="1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af" exitCode=0 Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.971699 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerDied","Data":"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af"} Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.971733 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4w6nx" event={"ID":"718b6f43-572e-47bf-aa5d-1013e6a923ad","Type":"ContainerDied","Data":"f46d90a82114ebe2fabdf62929e7191f236edc17e098eebd1d33f45f42941ae8"} Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.971735 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4w6nx" Dec 01 20:30:17 crc kubenswrapper[4888]: I1201 20:30:17.971752 4888 scope.go:117] "RemoveContainer" containerID="1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:17.999846 4888 scope.go:117] "RemoveContainer" containerID="37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.012556 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.023220 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4w6nx"] Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.059174 4888 scope.go:117] "RemoveContainer" containerID="4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.109245 4888 scope.go:117] "RemoveContainer" containerID="1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af" Dec 01 20:30:18 crc kubenswrapper[4888]: E1201 20:30:18.109757 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af\": container with ID starting with 1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af not found: ID does not exist" containerID="1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.109788 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af"} err="failed to get container status \"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af\": rpc error: code = NotFound desc = could not find container \"1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af\": container with ID starting with 1480b096f950a654bbbd283adcaeca0f338ca6b5e9b751d81e092cac1934f3af not found: ID does not exist" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.109819 4888 scope.go:117] "RemoveContainer" containerID="37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab" Dec 01 20:30:18 crc kubenswrapper[4888]: E1201 20:30:18.110261 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab\": container with ID starting with 37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab not found: ID does not exist" containerID="37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.110295 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab"} err="failed to get container status \"37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab\": rpc error: code = NotFound desc = could not find container \"37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab\": container with ID starting with 37aba320ccad1be2ffdc31a0a009d935e9bffa76dfe8d69f6a65142baa7329ab not found: ID does not exist" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.110315 4888 scope.go:117] "RemoveContainer" 
containerID="4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762" Dec 01 20:30:18 crc kubenswrapper[4888]: E1201 20:30:18.110562 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762\": container with ID starting with 4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762 not found: ID does not exist" containerID="4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.110593 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762"} err="failed to get container status \"4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762\": rpc error: code = NotFound desc = could not find container \"4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762\": container with ID starting with 4456c883e0129cb5e406fbc233a6a28df891c6db1d47b1c10dc358c4d9306762 not found: ID does not exist" Dec 01 20:30:18 crc kubenswrapper[4888]: I1201 20:30:18.463934 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" path="/var/lib/kubelet/pods/718b6f43-572e-47bf-aa5d-1013e6a923ad/volumes" Dec 01 20:31:50 crc kubenswrapper[4888]: I1201 20:31:50.038151 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:31:50 crc kubenswrapper[4888]: I1201 20:31:50.038718 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:32:20 crc kubenswrapper[4888]: I1201 20:32:20.037892 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:32:20 crc kubenswrapper[4888]: I1201 20:32:20.038492 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.022882 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:43 crc kubenswrapper[4888]: E1201 20:32:43.023989 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="registry-server" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.024004 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="registry-server" Dec 01 20:32:43 crc kubenswrapper[4888]: E1201 20:32:43.024024 4888 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="extract-content" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.024030 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="extract-content" Dec 01 20:32:43 crc kubenswrapper[4888]: E1201 20:32:43.024059 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="extract-utilities" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.024065 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="extract-utilities" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.024257 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="718b6f43-572e-47bf-aa5d-1013e6a923ad" containerName="registry-server" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.027608 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.037868 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.159108 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.159640 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxx9m\" (UniqueName: \"kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.159747 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.261510 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxx9m\" (UniqueName: \"kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.261888 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.262077 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.262442 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.262502 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.287498 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxx9m\" (UniqueName: \"kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m\") pod \"certified-operators-g5mqg\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.347698 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:43 crc kubenswrapper[4888]: I1201 20:32:43.874829 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:44 crc kubenswrapper[4888]: I1201 20:32:44.316088 4888 generic.go:334] "Generic (PLEG): container finished" podID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerID="d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879" exitCode=0 Dec 01 20:32:44 crc kubenswrapper[4888]: I1201 20:32:44.316574 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerDied","Data":"d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879"} Dec 01 20:32:44 crc kubenswrapper[4888]: I1201 20:32:44.316615 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerStarted","Data":"0639b18a66d98c3ac315e8089219949aa9904d595086dc37955e165bd1980877"} Dec 01 20:32:46 crc kubenswrapper[4888]: I1201 20:32:46.333920 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerDied","Data":"afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18"} Dec 01 20:32:46 crc kubenswrapper[4888]: I1201 20:32:46.333870 4888 generic.go:334] "Generic (PLEG): container finished" podID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerID="afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18" exitCode=0 Dec 01 20:32:47 crc kubenswrapper[4888]: I1201 20:32:47.344682 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerStarted","Data":"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3"} Dec 
01 20:32:47 crc kubenswrapper[4888]: I1201 20:32:47.370147 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g5mqg" podStartSLOduration=1.934462361 podStartE2EDuration="4.370121197s" podCreationTimestamp="2025-12-01 20:32:43 +0000 UTC" firstStartedPulling="2025-12-01 20:32:44.31937749 +0000 UTC m=+3564.190407404" lastFinishedPulling="2025-12-01 20:32:46.755036306 +0000 UTC m=+3566.626066240" observedRunningTime="2025-12-01 20:32:47.360034174 +0000 UTC m=+3567.231064118" watchObservedRunningTime="2025-12-01 20:32:47.370121197 +0000 UTC m=+3567.241151111" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.037760 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.038066 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.038118 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.038939 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.039007 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" gracePeriod=600 Dec 01 20:32:50 crc kubenswrapper[4888]: E1201 20:32:50.163112 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.369050 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" exitCode=0 Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.369095 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03"} Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 
20:32:50.369131 4888 scope.go:117] "RemoveContainer" containerID="5e82a4d60c6ef4d2175a82de16b09ff4bd56174ac0a5da255b70bd4ffb0708b2" Dec 01 20:32:50 crc kubenswrapper[4888]: I1201 20:32:50.369914 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:32:50 crc kubenswrapper[4888]: E1201 20:32:50.370342 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:32:53 crc kubenswrapper[4888]: I1201 20:32:53.348170 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:53 crc kubenswrapper[4888]: I1201 20:32:53.348656 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:53 crc kubenswrapper[4888]: I1201 20:32:53.405124 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:53 crc kubenswrapper[4888]: I1201 20:32:53.453355 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:53 crc kubenswrapper[4888]: I1201 20:32:53.640885 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:55 crc kubenswrapper[4888]: I1201 20:32:55.411548 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g5mqg" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="registry-server" containerID="cri-o://84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3" gracePeriod=2 Dec 01 20:32:55 crc kubenswrapper[4888]: I1201 20:32:55.943780 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.052302 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content\") pod \"726436fa-4aef-4ac4-a8bf-84a89c506b01\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.052514 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxx9m\" (UniqueName: \"kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m\") pod \"726436fa-4aef-4ac4-a8bf-84a89c506b01\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.052535 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities\") pod \"726436fa-4aef-4ac4-a8bf-84a89c506b01\" (UID: \"726436fa-4aef-4ac4-a8bf-84a89c506b01\") " Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.053853 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities" (OuterVolumeSpecName: "utilities") pod "726436fa-4aef-4ac4-a8bf-84a89c506b01" (UID: "726436fa-4aef-4ac4-a8bf-84a89c506b01"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.059707 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m" (OuterVolumeSpecName: "kube-api-access-pxx9m") pod "726436fa-4aef-4ac4-a8bf-84a89c506b01" (UID: "726436fa-4aef-4ac4-a8bf-84a89c506b01"). InnerVolumeSpecName "kube-api-access-pxx9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.108692 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "726436fa-4aef-4ac4-a8bf-84a89c506b01" (UID: "726436fa-4aef-4ac4-a8bf-84a89c506b01"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.154640 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxx9m\" (UniqueName: \"kubernetes.io/projected/726436fa-4aef-4ac4-a8bf-84a89c506b01-kube-api-access-pxx9m\") on node \"crc\" DevicePath \"\"" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.154682 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.154694 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/726436fa-4aef-4ac4-a8bf-84a89c506b01-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.420997 4888 generic.go:334] "Generic (PLEG): container finished" podID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerID="84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3" exitCode=0 Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.421060 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g5mqg" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.421051 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerDied","Data":"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3"} Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.421409 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g5mqg" event={"ID":"726436fa-4aef-4ac4-a8bf-84a89c506b01","Type":"ContainerDied","Data":"0639b18a66d98c3ac315e8089219949aa9904d595086dc37955e165bd1980877"} Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.421436 4888 scope.go:117] "RemoveContainer" containerID="84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.447555 4888 scope.go:117] "RemoveContainer" containerID="afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.470974 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.471051 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g5mqg"] Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.484296 4888 scope.go:117] "RemoveContainer" containerID="d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.512626 4888 scope.go:117] "RemoveContainer" containerID="84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3" Dec 01 20:32:56 crc kubenswrapper[4888]: E1201 20:32:56.513019 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3\": container with ID starting with 84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3 not found: ID does not exist" containerID="84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.513061 
4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3"} err="failed to get container status \"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3\": rpc error: code = NotFound desc = could not find container \"84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3\": container with ID starting with 84981e0284d21e0fbdc56b0bd627c8ee4ddbd6551461b620889f7ceaa43973a3 not found: ID does not exist" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.513093 4888 scope.go:117] "RemoveContainer" containerID="afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18" Dec 01 20:32:56 crc kubenswrapper[4888]: E1201 20:32:56.513527 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18\": container with ID starting with afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18 not found: ID does not exist" containerID="afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.513568 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18"} err="failed to get container status \"afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18\": rpc error: code = NotFound desc = could not find container \"afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18\": container with ID starting with afd257c92b058293e8de4d47ca1e035612c1b15109ab322ea73c43b81600fd18 not found: ID does not exist" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.513598 4888 scope.go:117] "RemoveContainer" containerID="d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879" Dec 01 20:32:56 crc kubenswrapper[4888]: E1201 20:32:56.513904 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879\": container with ID starting with d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879 not found: ID does not exist" containerID="d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879" Dec 01 20:32:56 crc kubenswrapper[4888]: I1201 20:32:56.513953 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879"} err="failed to get container status \"d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879\": rpc error: code = NotFound desc = could not find container \"d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879\": container with ID starting with d02d9b4eb5d393de27bba5c7249c37eb9ec2a124a67d9a8311a7e56ef5af2879 not found: ID does not exist" Dec 01 20:32:58 crc kubenswrapper[4888]: I1201 20:32:58.466586 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" path="/var/lib/kubelet/pods/726436fa-4aef-4ac4-a8bf-84a89c506b01/volumes" Dec 01 20:33:03 crc kubenswrapper[4888]: I1201 20:33:03.450979 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:33:03 crc kubenswrapper[4888]: E1201 20:33:03.451693 4888 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:33:14 crc kubenswrapper[4888]: I1201 20:33:14.451973 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:33:14 crc kubenswrapper[4888]: E1201 20:33:14.453413 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:33:23 crc kubenswrapper[4888]: I1201 20:33:23.685181 4888 generic.go:334] "Generic (PLEG): container finished" podID="512845e9-2357-4129-bfb0-4e636ea554e9" containerID="a8a640282116068571f24b32b5ad187630826a8b00e08e7f70d1f0fd7fc34a3d" exitCode=0 Dec 01 20:33:23 crc kubenswrapper[4888]: I1201 20:33:23.685382 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"512845e9-2357-4129-bfb0-4e636ea554e9","Type":"ContainerDied","Data":"a8a640282116068571f24b32b5ad187630826a8b00e08e7f70d1f0fd7fc34a3d"} Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.037630 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.235891 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.235966 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236134 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236242 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236287 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: 
\"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236313 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236385 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236428 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236451 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw5g4\" (UniqueName: \"kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4\") pod \"512845e9-2357-4129-bfb0-4e636ea554e9\" (UID: \"512845e9-2357-4129-bfb0-4e636ea554e9\") " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236701 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.236919 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data" (OuterVolumeSpecName: "config-data") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.237007 4888 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.237027 4888 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.242667 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.242765 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4" (OuterVolumeSpecName: "kube-api-access-xw5g4") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "kube-api-access-xw5g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.244449 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.266676 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.266711 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.267210 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.307589 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "512845e9-2357-4129-bfb0-4e636ea554e9" (UID: "512845e9-2357-4129-bfb0-4e636ea554e9"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339332 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339387 4888 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339398 4888 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339407 4888 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/512845e9-2357-4129-bfb0-4e636ea554e9-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339417 4888 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/512845e9-2357-4129-bfb0-4e636ea554e9-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339427 4888 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/512845e9-2357-4129-bfb0-4e636ea554e9-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.339438 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw5g4\" (UniqueName: \"kubernetes.io/projected/512845e9-2357-4129-bfb0-4e636ea554e9-kube-api-access-xw5g4\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.360456 4888 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.441017 4888 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.705266 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"512845e9-2357-4129-bfb0-4e636ea554e9","Type":"ContainerDied","Data":"bab0ec730f6c9a6ddafe6f86f8c35b0d85f385d36ca3d3c5aecbf1acd5dd04ee"} Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.705320 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bab0ec730f6c9a6ddafe6f86f8c35b0d85f385d36ca3d3c5aecbf1acd5dd04ee" Dec 01 20:33:25 crc kubenswrapper[4888]: I1201 20:33:25.705344 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 20:33:27 crc kubenswrapper[4888]: I1201 20:33:27.452495 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:33:27 crc kubenswrapper[4888]: E1201 20:33:27.453224 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.811254 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:33:28 crc kubenswrapper[4888]: E1201 20:33:28.812288 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="registry-server" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812312 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="registry-server" Dec 01 20:33:28 crc kubenswrapper[4888]: E1201 20:33:28.812329 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="extract-content" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812337 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="extract-content" Dec 01 20:33:28 crc kubenswrapper[4888]: E1201 20:33:28.812367 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="512845e9-2357-4129-bfb0-4e636ea554e9" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812376 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="512845e9-2357-4129-bfb0-4e636ea554e9" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:33:28 crc kubenswrapper[4888]: E1201 20:33:28.812408 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="extract-utilities" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812417 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="extract-utilities" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812744 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="512845e9-2357-4129-bfb0-4e636ea554e9" containerName="tempest-tests-tempest-tests-runner" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.812800 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="726436fa-4aef-4ac4-a8bf-84a89c506b01" containerName="registry-server" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.813711 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.823837 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:33:28 crc kubenswrapper[4888]: I1201 20:33:28.835639 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-mp6w7" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.011738 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.011895 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8dzf\" (UniqueName: \"kubernetes.io/projected/54cbeba4-57d6-4934-8bc6-61cea77023c8-kube-api-access-n8dzf\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.114098 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8dzf\" (UniqueName: \"kubernetes.io/projected/54cbeba4-57d6-4934-8bc6-61cea77023c8-kube-api-access-n8dzf\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.114358 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.114763 4888 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.137304 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8dzf\" (UniqueName: \"kubernetes.io/projected/54cbeba4-57d6-4934-8bc6-61cea77023c8-kube-api-access-n8dzf\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.139550 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"54cbeba4-57d6-4934-8bc6-61cea77023c8\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc 
kubenswrapper[4888]: I1201 20:33:29.167808 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.601491 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.611444 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:33:29 crc kubenswrapper[4888]: I1201 20:33:29.751809 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"54cbeba4-57d6-4934-8bc6-61cea77023c8","Type":"ContainerStarted","Data":"6949361792fe0685b8b1b17b8d02c2d7ef72215e53205884956d8a71c2e62600"} Dec 01 20:33:31 crc kubenswrapper[4888]: I1201 20:33:31.777389 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"54cbeba4-57d6-4934-8bc6-61cea77023c8","Type":"ContainerStarted","Data":"36b54de660d3022fdb359f9613903b471ae39c36b30db41443f172fe1ca14970"} Dec 01 20:33:31 crc kubenswrapper[4888]: I1201 20:33:31.796680 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.586395379 podStartE2EDuration="3.796662634s" podCreationTimestamp="2025-12-01 20:33:28 +0000 UTC" firstStartedPulling="2025-12-01 20:33:29.611160617 +0000 UTC m=+3609.482190531" lastFinishedPulling="2025-12-01 20:33:30.821427872 +0000 UTC m=+3610.692457786" observedRunningTime="2025-12-01 20:33:31.793148845 +0000 UTC m=+3611.664178759" watchObservedRunningTime="2025-12-01 20:33:31.796662634 +0000 UTC m=+3611.667692548" Dec 01 20:33:41 crc kubenswrapper[4888]: I1201 20:33:41.452124 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:33:41 crc kubenswrapper[4888]: E1201 20:33:41.452992 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.394543 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xms5/must-gather-6sjdk"] Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.396553 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.398833 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9xms5"/"default-dockercfg-w2w4c" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.399147 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9xms5"/"openshift-service-ca.crt" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.403740 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9xms5"/"kube-root-ca.crt" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.414197 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9xms5/must-gather-6sjdk"] Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.453448 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:33:53 crc kubenswrapper[4888]: E1201 20:33:53.453693 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.504365 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcs7q\" (UniqueName: \"kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.504468 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.607708 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcs7q\" (UniqueName: \"kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.607791 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.608180 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.627148 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcs7q\" (UniqueName: \"kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q\") pod \"must-gather-6sjdk\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:53 crc kubenswrapper[4888]: I1201 20:33:53.716073 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:33:54 crc kubenswrapper[4888]: I1201 20:33:54.178404 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9xms5/must-gather-6sjdk"] Dec 01 20:33:55 crc kubenswrapper[4888]: I1201 20:33:55.035302 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/must-gather-6sjdk" event={"ID":"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5","Type":"ContainerStarted","Data":"4f140546bd29fb0649cfdd80ee30a9c29960415574d34740bd167ae21a705d86"} Dec 01 20:34:00 crc kubenswrapper[4888]: I1201 20:34:00.089904 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/must-gather-6sjdk" event={"ID":"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5","Type":"ContainerStarted","Data":"406770a9b843388fd9373eb52f3f456dee5932508c31da7335ae734039f94e2f"} Dec 01 20:34:00 crc kubenswrapper[4888]: I1201 20:34:00.091672 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/must-gather-6sjdk" event={"ID":"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5","Type":"ContainerStarted","Data":"880c299ae69b61d21fe0a9493675cb35e586d54d462afb77cb8081a15b55e59e"} Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.536839 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9xms5/must-gather-6sjdk" podStartSLOduration=4.941778901 podStartE2EDuration="9.536820989s" podCreationTimestamp="2025-12-01 20:33:53 +0000 UTC" firstStartedPulling="2025-12-01 20:33:54.180116818 +0000 UTC m=+3634.051146742" lastFinishedPulling="2025-12-01 20:33:58.775158916 +0000 UTC m=+3638.646188830" observedRunningTime="2025-12-01 20:34:00.110592298 +0000 UTC m=+3639.981622212" watchObservedRunningTime="2025-12-01 20:34:02.536820989 +0000 UTC m=+3642.407850903" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.543443 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xms5/crc-debug-7l8dx"] Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.544810 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.622823 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.622905 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55896\" (UniqueName: \"kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.725274 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.725408 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55896\" (UniqueName: \"kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.725758 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.750012 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55896\" (UniqueName: \"kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896\") pod \"crc-debug-7l8dx\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: I1201 20:34:02.863396 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:02 crc kubenswrapper[4888]: W1201 20:34:02.892914 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde16641c_d417_41cc_b34d_db32de5c8278.slice/crio-bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5 WatchSource:0}: Error finding container bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5: Status 404 returned error can't find the container with id bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5 Dec 01 20:34:03 crc kubenswrapper[4888]: I1201 20:34:03.119163 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" event={"ID":"de16641c-d417-41cc-b34d-db32de5c8278","Type":"ContainerStarted","Data":"bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5"} Dec 01 20:34:06 crc kubenswrapper[4888]: I1201 20:34:06.451957 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:34:06 crc kubenswrapper[4888]: E1201 20:34:06.452836 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:34:14 crc kubenswrapper[4888]: I1201 20:34:14.273756 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" event={"ID":"de16641c-d417-41cc-b34d-db32de5c8278","Type":"ContainerStarted","Data":"5286d261e14e7a88815f53e00c6e28b14279986791865c27a1b954d5371b6c6f"} Dec 01 20:34:14 crc kubenswrapper[4888]: I1201 20:34:14.293602 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" podStartSLOduration=1.325709625 podStartE2EDuration="12.293579914s" podCreationTimestamp="2025-12-01 20:34:02 +0000 UTC" firstStartedPulling="2025-12-01 20:34:02.894743405 +0000 UTC m=+3642.765773319" lastFinishedPulling="2025-12-01 20:34:13.862613694 +0000 UTC m=+3653.733643608" observedRunningTime="2025-12-01 20:34:14.287019579 +0000 UTC m=+3654.158049503" watchObservedRunningTime="2025-12-01 20:34:14.293579914 +0000 UTC m=+3654.164609828" Dec 01 20:34:17 crc kubenswrapper[4888]: I1201 20:34:17.452301 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:34:17 crc kubenswrapper[4888]: E1201 20:34:17.453515 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:34:28 crc kubenswrapper[4888]: I1201 20:34:28.451674 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:34:28 crc kubenswrapper[4888]: E1201 20:34:28.452620 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:34:39 crc kubenswrapper[4888]: I1201 20:34:39.451833 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:34:39 crc kubenswrapper[4888]: E1201 20:34:39.455754 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:34:54 crc kubenswrapper[4888]: I1201 20:34:54.452346 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:34:54 crc kubenswrapper[4888]: E1201 20:34:54.460178 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:34:57 crc kubenswrapper[4888]: I1201 20:34:57.662008 4888 generic.go:334] "Generic (PLEG): container finished" podID="de16641c-d417-41cc-b34d-db32de5c8278" containerID="5286d261e14e7a88815f53e00c6e28b14279986791865c27a1b954d5371b6c6f" exitCode=0 Dec 01 20:34:57 crc kubenswrapper[4888]: I1201 20:34:57.662196 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" event={"ID":"de16641c-d417-41cc-b34d-db32de5c8278","Type":"ContainerDied","Data":"5286d261e14e7a88815f53e00c6e28b14279986791865c27a1b954d5371b6c6f"} Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.811271 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.844884 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host\") pod \"de16641c-d417-41cc-b34d-db32de5c8278\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.845037 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host" (OuterVolumeSpecName: "host") pod "de16641c-d417-41cc-b34d-db32de5c8278" (UID: "de16641c-d417-41cc-b34d-db32de5c8278"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.845218 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55896\" (UniqueName: \"kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896\") pod \"de16641c-d417-41cc-b34d-db32de5c8278\" (UID: \"de16641c-d417-41cc-b34d-db32de5c8278\") " Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.845699 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/de16641c-d417-41cc-b34d-db32de5c8278-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.848158 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-7l8dx"] Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.852149 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896" (OuterVolumeSpecName: "kube-api-access-55896") pod "de16641c-d417-41cc-b34d-db32de5c8278" (UID: "de16641c-d417-41cc-b34d-db32de5c8278"). InnerVolumeSpecName "kube-api-access-55896". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.857819 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-7l8dx"] Dec 01 20:34:58 crc kubenswrapper[4888]: I1201 20:34:58.946972 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55896\" (UniqueName: \"kubernetes.io/projected/de16641c-d417-41cc-b34d-db32de5c8278-kube-api-access-55896\") on node \"crc\" DevicePath \"\"" Dec 01 20:34:59 crc kubenswrapper[4888]: I1201 20:34:59.689994 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5" Dec 01 20:34:59 crc kubenswrapper[4888]: I1201 20:34:59.690053 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-7l8dx" Dec 01 20:34:59 crc kubenswrapper[4888]: E1201 20:34:59.887072 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde16641c_d417_41cc_b34d_db32de5c8278.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde16641c_d417_41cc_b34d_db32de5c8278.slice/crio-bf9d9f43929a481370d2ee449851164b3cd3162b6d7db90398ede8a31cac78e5\": RecentStats: unable to find data in memory cache]" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.003676 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xms5/crc-debug-vmwwv"] Dec 01 20:35:00 crc kubenswrapper[4888]: E1201 20:35:00.004464 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de16641c-d417-41cc-b34d-db32de5c8278" containerName="container-00" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.004482 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="de16641c-d417-41cc-b34d-db32de5c8278" containerName="container-00" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.004764 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="de16641c-d417-41cc-b34d-db32de5c8278" containerName="container-00" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.005409 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.068744 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgp5x\" (UniqueName: \"kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.069065 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.170343 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.170562 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgp5x\" (UniqueName: \"kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.170748 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 
20:35:00.187559 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgp5x\" (UniqueName: \"kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x\") pod \"crc-debug-vmwwv\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.327132 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:00 crc kubenswrapper[4888]: W1201 20:35:00.363220 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89fcffdf_1538_4ea9_860f_ed330f7cc5b5.slice/crio-3ac46499d92a3a2e61fd3c6cf183d57b9d29d453d94a46657a86f81e628d6e5c WatchSource:0}: Error finding container 3ac46499d92a3a2e61fd3c6cf183d57b9d29d453d94a46657a86f81e628d6e5c: Status 404 returned error can't find the container with id 3ac46499d92a3a2e61fd3c6cf183d57b9d29d453d94a46657a86f81e628d6e5c Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.465074 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de16641c-d417-41cc-b34d-db32de5c8278" path="/var/lib/kubelet/pods/de16641c-d417-41cc-b34d-db32de5c8278/volumes" Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.701988 4888 generic.go:334] "Generic (PLEG): container finished" podID="89fcffdf-1538-4ea9-860f-ed330f7cc5b5" containerID="8020af78c5aa5ab0cf63c24d77766da2c237f41d58f5d5914dc72eb7a6b27112" exitCode=0 Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.702082 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" event={"ID":"89fcffdf-1538-4ea9-860f-ed330f7cc5b5","Type":"ContainerDied","Data":"8020af78c5aa5ab0cf63c24d77766da2c237f41d58f5d5914dc72eb7a6b27112"} Dec 01 20:35:00 crc kubenswrapper[4888]: I1201 20:35:00.702372 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" event={"ID":"89fcffdf-1538-4ea9-860f-ed330f7cc5b5","Type":"ContainerStarted","Data":"3ac46499d92a3a2e61fd3c6cf183d57b9d29d453d94a46657a86f81e628d6e5c"} Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.131713 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-vmwwv"] Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.139594 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-vmwwv"] Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.797901 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.898763 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host\") pod \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.898824 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host" (OuterVolumeSpecName: "host") pod "89fcffdf-1538-4ea9-860f-ed330f7cc5b5" (UID: "89fcffdf-1538-4ea9-860f-ed330f7cc5b5"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.899167 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgp5x\" (UniqueName: \"kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x\") pod \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\" (UID: \"89fcffdf-1538-4ea9-860f-ed330f7cc5b5\") " Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.899722 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:01 crc kubenswrapper[4888]: I1201 20:35:01.904563 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x" (OuterVolumeSpecName: "kube-api-access-rgp5x") pod "89fcffdf-1538-4ea9-860f-ed330f7cc5b5" (UID: "89fcffdf-1538-4ea9-860f-ed330f7cc5b5"). InnerVolumeSpecName "kube-api-access-rgp5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.002007 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgp5x\" (UniqueName: \"kubernetes.io/projected/89fcffdf-1538-4ea9-860f-ed330f7cc5b5-kube-api-access-rgp5x\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.305571 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9xms5/crc-debug-6gbw9"] Dec 01 20:35:02 crc kubenswrapper[4888]: E1201 20:35:02.305930 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89fcffdf-1538-4ea9-860f-ed330f7cc5b5" containerName="container-00" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.305941 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="89fcffdf-1538-4ea9-860f-ed330f7cc5b5" containerName="container-00" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.306116 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="89fcffdf-1538-4ea9-860f-ed330f7cc5b5" containerName="container-00" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.306718 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.408373 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x26zq\" (UniqueName: \"kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.408543 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.477599 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89fcffdf-1538-4ea9-860f-ed330f7cc5b5" path="/var/lib/kubelet/pods/89fcffdf-1538-4ea9-860f-ed330f7cc5b5/volumes" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.510262 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.510389 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x26zq\" (UniqueName: \"kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.510463 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.528585 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x26zq\" (UniqueName: \"kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq\") pod \"crc-debug-6gbw9\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.631114 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:02 crc kubenswrapper[4888]: W1201 20:35:02.666566 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6201aca_3a7c_4d66_bbdc_21a5cb451750.slice/crio-1a577ef12bc9d5da0fc149155c92a5f60c46a22fda14d2dc7db947c0c3657743 WatchSource:0}: Error finding container 1a577ef12bc9d5da0fc149155c92a5f60c46a22fda14d2dc7db947c0c3657743: Status 404 returned error can't find the container with id 1a577ef12bc9d5da0fc149155c92a5f60c46a22fda14d2dc7db947c0c3657743 Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.722907 4888 scope.go:117] "RemoveContainer" containerID="8020af78c5aa5ab0cf63c24d77766da2c237f41d58f5d5914dc72eb7a6b27112" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.723021 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-vmwwv" Dec 01 20:35:02 crc kubenswrapper[4888]: I1201 20:35:02.725259 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" event={"ID":"e6201aca-3a7c-4d66-bbdc-21a5cb451750","Type":"ContainerStarted","Data":"1a577ef12bc9d5da0fc149155c92a5f60c46a22fda14d2dc7db947c0c3657743"} Dec 01 20:35:03 crc kubenswrapper[4888]: I1201 20:35:03.736032 4888 generic.go:334] "Generic (PLEG): container finished" podID="e6201aca-3a7c-4d66-bbdc-21a5cb451750" containerID="ab4c8176c742e69b1a74ed045efadfcff6c474b2126816e5df7d777f7aa92c41" exitCode=0 Dec 01 20:35:03 crc kubenswrapper[4888]: I1201 20:35:03.736147 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" event={"ID":"e6201aca-3a7c-4d66-bbdc-21a5cb451750","Type":"ContainerDied","Data":"ab4c8176c742e69b1a74ed045efadfcff6c474b2126816e5df7d777f7aa92c41"} Dec 01 20:35:03 crc kubenswrapper[4888]: I1201 20:35:03.775259 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-6gbw9"] Dec 01 20:35:03 crc kubenswrapper[4888]: I1201 20:35:03.782977 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xms5/crc-debug-6gbw9"] Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.850158 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.958240 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x26zq\" (UniqueName: \"kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq\") pod \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.958409 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host\") pod \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\" (UID: \"e6201aca-3a7c-4d66-bbdc-21a5cb451750\") " Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.958699 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host" (OuterVolumeSpecName: "host") pod "e6201aca-3a7c-4d66-bbdc-21a5cb451750" (UID: "e6201aca-3a7c-4d66-bbdc-21a5cb451750"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.959363 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6201aca-3a7c-4d66-bbdc-21a5cb451750-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:04 crc kubenswrapper[4888]: I1201 20:35:04.963645 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq" (OuterVolumeSpecName: "kube-api-access-x26zq") pod "e6201aca-3a7c-4d66-bbdc-21a5cb451750" (UID: "e6201aca-3a7c-4d66-bbdc-21a5cb451750"). InnerVolumeSpecName "kube-api-access-x26zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:35:05 crc kubenswrapper[4888]: I1201 20:35:05.061516 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x26zq\" (UniqueName: \"kubernetes.io/projected/e6201aca-3a7c-4d66-bbdc-21a5cb451750-kube-api-access-x26zq\") on node \"crc\" DevicePath \"\"" Dec 01 20:35:05 crc kubenswrapper[4888]: I1201 20:35:05.451101 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:35:05 crc kubenswrapper[4888]: E1201 20:35:05.451367 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:35:05 crc kubenswrapper[4888]: I1201 20:35:05.755602 4888 scope.go:117] "RemoveContainer" containerID="ab4c8176c742e69b1a74ed045efadfcff6c474b2126816e5df7d777f7aa92c41" Dec 01 20:35:05 crc kubenswrapper[4888]: I1201 20:35:05.755664 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9xms5/crc-debug-6gbw9" Dec 01 20:35:06 crc kubenswrapper[4888]: I1201 20:35:06.317839 4888 scope.go:117] "RemoveContainer" containerID="5a1acab26ed5f8e4802a026e26674d5683debb6b3e039e7e3c5e4bb676594408" Dec 01 20:35:06 crc kubenswrapper[4888]: I1201 20:35:06.338545 4888 scope.go:117] "RemoveContainer" containerID="de7177e7d0f6dc6bccd9229ad5dc1d30639c11656e376ee5455e5a7e5d7ac87f" Dec 01 20:35:06 crc kubenswrapper[4888]: I1201 20:35:06.364378 4888 scope.go:117] "RemoveContainer" containerID="8af216b28f7abf966bf05d957b91b8b4f6ecb1cdd43db25386ed14d8f933d0dd" Dec 01 20:35:06 crc kubenswrapper[4888]: I1201 20:35:06.462516 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6201aca-3a7c-4d66-bbdc-21a5cb451750" path="/var/lib/kubelet/pods/e6201aca-3a7c-4d66-bbdc-21a5cb451750/volumes" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.309708 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75669cfdf8-fmlxq_a887275f-f805-4f46-962b-d54149803ab3/barbican-api/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.373642 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75669cfdf8-fmlxq_a887275f-f805-4f46-962b-d54149803ab3/barbican-api-log/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.474331 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8db6f5c5d-bb5x6_af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f/barbican-keystone-listener/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.590694 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8db6f5c5d-bb5x6_af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f/barbican-keystone-listener-log/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.700231 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79c4fd898c-nlfgq_13de5904-4edb-417c-aa16-c1690ba7a828/barbican-worker/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.708039 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79c4fd898c-nlfgq_13de5904-4edb-417c-aa16-c1690ba7a828/barbican-worker-log/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.882435 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx_04974f6b-2545-433f-907d-5f97024057d4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:18 crc kubenswrapper[4888]: I1201 20:35:18.905488 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/ceilometer-central-agent/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.027346 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/ceilometer-notification-agent/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.058295 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/proxy-httpd/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.162828 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/sg-core/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.277990 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-api-0_6b0b6a93-78d0-43c8-b6fb-059da98cf4bd/cinder-api-log/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.323558 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b0b6a93-78d0-43c8-b6fb-059da98cf4bd/cinder-api/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.455109 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4d69bebc-c646-4da3-acc5-c7a3106c8100/cinder-scheduler/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.491675 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4d69bebc-c646-4da3-acc5-c7a3106c8100/probe/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.638944 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd_fc27098c-7ab4-4b1d-b5e2-2784d655cd9c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.700992 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk_c80b7e5e-b12e-49c1-8379-a7e33ad355fb/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:19 crc kubenswrapper[4888]: I1201 20:35:19.855478 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/init/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.039623 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/init/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.117927 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/dnsmasq-dns/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.158460 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lknxb_f547ee6c-51cc-47cb-b6c8-2df4311039b2/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.366506 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b9f3c82f-62d2-4a71-9832-223f1a735016/glance-httpd/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.405560 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b9f3c82f-62d2-4a71-9832-223f1a735016/glance-log/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.464228 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:35:20 crc kubenswrapper[4888]: E1201 20:35:20.464831 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.598879 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_2f583b75-592c-438c-ae74-80dbd15c4eb1/glance-log/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.646501 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2f583b75-592c-438c-ae74-80dbd15c4eb1/glance-httpd/0.log" Dec 01 20:35:20 crc kubenswrapper[4888]: I1201 20:35:20.764118 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d8bccccd8-fw8bk_5006252a-8f29-475c-9847-e2d6662ff13b/horizon/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.024740 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz_2cc88e76-38aa-4d88-97e3-2d9829760fdf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.165002 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d8bccccd8-fw8bk_5006252a-8f29-475c-9847-e2d6662ff13b/horizon-log/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.232724 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pfrz2_61d5991b-f680-443e-8562-d4e755429abe/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.505599 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6c8cb9cfb7-n54hp_6c61e8e0-f725-45ed-8a82-740b3243120d/keystone-api/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.528538 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410321-t5grt_b059b2fe-58fd-46d6-8da6-ce215b31283a/keystone-cron/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.683658 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_26b544b6-2ef6-40f8-8cf6-0834d6d7bc39/kube-state-metrics/0.log" Dec 01 20:35:21 crc kubenswrapper[4888]: I1201 20:35:21.788159 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8_a5293702-c5a9-442d-b776-bed869af0d5d/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:22 crc kubenswrapper[4888]: I1201 20:35:22.170855 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-58df6cb45-qjhmp_a4b29995-f291-4e12-bfb1-fad0318b0416/neutron-api/0.log" Dec 01 20:35:22 crc kubenswrapper[4888]: I1201 20:35:22.206430 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-58df6cb45-qjhmp_a4b29995-f291-4e12-bfb1-fad0318b0416/neutron-httpd/0.log" Dec 01 20:35:22 crc kubenswrapper[4888]: I1201 20:35:22.377956 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m_b1abae65-0fe4-4a5c-afa8-824894f56643/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:22 crc kubenswrapper[4888]: I1201 20:35:22.900433 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b819abdf-a2be-4ee7-a019-15bfbc16578a/nova-cell0-conductor-conductor/0.log" Dec 01 20:35:22 crc kubenswrapper[4888]: I1201 20:35:22.964132 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed/nova-api-log/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.157571 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-api-0_8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed/nova-api-api/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.183444 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_045ca115-d337-48ae-bfce-0df835c95bc8/nova-cell1-conductor-conductor/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.257762 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_086e1f96-58d1-42ab-a745-839383b65b7e/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.458504 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-m7dqd_51a678f1-7309-4200-bf0f-8329f67d2a5c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.570009 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_649d802d-a02d-403f-938c-8875b22f1e04/nova-metadata-log/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.837041 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e595d238-ccc8-452b-9e47-3439757e586f/nova-scheduler-scheduler/0.log" Dec 01 20:35:23 crc kubenswrapper[4888]: I1201 20:35:23.961986 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/mysql-bootstrap/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.164673 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/mysql-bootstrap/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.176700 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/galera/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.420728 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/mysql-bootstrap/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.592463 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/mysql-bootstrap/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.612309 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/galera/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.743332 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_649d802d-a02d-403f-938c-8875b22f1e04/nova-metadata-metadata/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.824434 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a72a3441-507d-44c7-b575-3c3a12fa6821/openstackclient/0.log" Dec 01 20:35:24 crc kubenswrapper[4888]: I1201 20:35:24.989538 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-b4v8q_644ca96f-aee4-40b9-957b-b18e28634a66/ovn-controller/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.032432 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-v92p7_9c965736-0751-48aa-bf50-db27978e0e91/openstack-network-exporter/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.235620 4888 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server-init/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.439801 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server-init/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.479725 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovs-vswitchd/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.512229 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.749168 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-rtj2r_2e35aaa2-7b44-48c2-b94d-46f753c5698f/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.752972 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b3cf90fd-3f01-4cf5-bb00-9d5c2e374448/openstack-network-exporter/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.787997 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b3cf90fd-3f01-4cf5-bb00-9d5c2e374448/ovn-northd/0.log" Dec 01 20:35:25 crc kubenswrapper[4888]: I1201 20:35:25.983022 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b1e54c26-f189-448c-be1f-57d58fcd50bf/openstack-network-exporter/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.013375 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b1e54c26-f189-448c-be1f-57d58fcd50bf/ovsdbserver-nb/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.188459 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b3edb1da-ac59-4264-833a-499b13fb5071/openstack-network-exporter/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.274909 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b3edb1da-ac59-4264-833a-499b13fb5071/ovsdbserver-sb/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.375371 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6498684f7d-hltrn_599b1532-a5ae-4a98-bcc2-cc6a9d93cae3/placement-api/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.436294 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/setup-container/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.457132 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6498684f7d-hltrn_599b1532-a5ae-4a98-bcc2-cc6a9d93cae3/placement-log/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.857308 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/setup-container/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.914384 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/rabbitmq/0.log" Dec 01 20:35:26 crc kubenswrapper[4888]: I1201 20:35:26.929921 4888 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/setup-container/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.106283 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/setup-container/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.129916 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/rabbitmq/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.187015 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm_f6a86735-753a-4ef6-9e99-5394105fcff0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.524295 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx_7e6c49f3-b69a-4381-b5d6-4a66e283d49f/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.525813 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-9qqjn_44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.736645 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-k46s7_49b24356-5b0c-43f4-a3d8-0a74c3aa57d9/ssh-known-hosts-edpm-deployment/0.log" Dec 01 20:35:27 crc kubenswrapper[4888]: I1201 20:35:27.777434 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-nbgcl_0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.022137 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5b97969cc9-55p9t_ddf684f3-00b0-4564-99ba-e29243df64fb/proxy-server/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.155482 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5b97969cc9-55p9t_ddf684f3-00b0-4564-99ba-e29243df64fb/proxy-httpd/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.208608 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s6dcl_2617e3a7-0ff0-4843-9126-a32cee9da7ca/swift-ring-rebalance/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.280303 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-auditor/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.392157 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-reaper/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.430966 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-replicator/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.527269 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-server/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.556294 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-auditor/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.630918 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-replicator/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.651622 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-server/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.743516 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-updater/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.774464 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-auditor/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.841522 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-expirer/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.904365 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-replicator/0.log" Dec 01 20:35:28 crc kubenswrapper[4888]: I1201 20:35:28.942889 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-server/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.073913 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-updater/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.097366 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/rsync/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.159109 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/swift-recon-cron/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.332535 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd_41059254-cd26-40bb-bd15-bd935fd4e7e1/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.423253 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_512845e9-2357-4129-bfb0-4e636ea554e9/tempest-tests-tempest-tests-runner/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.552318 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_54cbeba4-57d6-4934-8bc6-61cea77023c8/test-operator-logs-container/0.log" Dec 01 20:35:29 crc kubenswrapper[4888]: I1201 20:35:29.658389 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4_3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:35:31 crc kubenswrapper[4888]: I1201 20:35:31.451131 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:35:31 crc kubenswrapper[4888]: E1201 20:35:31.451617 4888 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:35:37 crc kubenswrapper[4888]: I1201 20:35:37.986688 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac/memcached/0.log" Dec 01 20:35:43 crc kubenswrapper[4888]: I1201 20:35:43.451355 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:35:43 crc kubenswrapper[4888]: E1201 20:35:43.452133 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.451420 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:35:55 crc kubenswrapper[4888]: E1201 20:35:55.452262 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.659667 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-kjwft_1529e922-fd00-4f32-878a-d8a322a7b6b7/kube-rbac-proxy/0.log" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.733209 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-kjwft_1529e922-fd00-4f32-878a-d8a322a7b6b7/manager/0.log" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.837475 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-f58dj_a363b1d3-f519-41df-bdf8-e80b83edab4d/kube-rbac-proxy/0.log" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.912145 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lbp4z_a0f5d22c-34c3-40c2-889c-b7900120919c/kube-rbac-proxy/0.log" Dec 01 20:35:55 crc kubenswrapper[4888]: I1201 20:35:55.947509 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-f58dj_a363b1d3-f519-41df-bdf8-e80b83edab4d/manager/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.051335 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lbp4z_a0f5d22c-34c3-40c2-889c-b7900120919c/manager/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 
20:35:56.124421 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.303839 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.336046 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.353105 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.477396 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.519547 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/extract/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.532634 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.661360 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-jszb6_9f5355dc-205f-4dca-91cf-39209ca1a7b3/kube-rbac-proxy/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.770998 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-8wh4g_3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2/kube-rbac-proxy/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.785223 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-jszb6_9f5355dc-205f-4dca-91cf-39209ca1a7b3/manager/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.856742 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-8wh4g_3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2/manager/0.log" Dec 01 20:35:56 crc kubenswrapper[4888]: I1201 20:35:56.936692 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gqwgf_1b3586db-3f49-4ee4-aed0-5e4d469fad92/kube-rbac-proxy/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.000156 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gqwgf_1b3586db-3f49-4ee4-aed0-5e4d469fad92/manager/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.186702 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bjgvv_6cb92420-4e6c-4407-9a54-93f003d1c5e9/kube-rbac-proxy/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.251270 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bjgvv_6cb92420-4e6c-4407-9a54-93f003d1c5e9/manager/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.410021 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5mfth_da594cb2-bb6a-4028-a609-68385c474377/kube-rbac-proxy/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.475526 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5mfth_da594cb2-bb6a-4028-a609-68385c474377/manager/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.661221 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-446zc_b7f34996-fe75-4c30-9e22-022f644f7c89/kube-rbac-proxy/0.log" Dec 01 20:35:57 crc kubenswrapper[4888]: I1201 20:35:57.714995 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-446zc_b7f34996-fe75-4c30-9e22-022f644f7c89/manager/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.007210 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-sszrn_516e9598-68a9-431a-84af-725e3a053e66/kube-rbac-proxy/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.077377 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-sszrn_516e9598-68a9-431a-84af-725e3a053e66/manager/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.138072 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-pxs95_968d110b-5720-400e-9094-8ec39acb4cf6/kube-rbac-proxy/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.297471 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-pxs95_968d110b-5720-400e-9094-8ec39acb4cf6/manager/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.446561 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tcnh9_8ad47b0a-b049-45fa-afea-44eb4d5be85f/kube-rbac-proxy/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.471042 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tcnh9_8ad47b0a-b049-45fa-afea-44eb4d5be85f/manager/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.577302 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-z2xkh_4651cf7c-a7f9-4137-9d3b-6a656746f373/kube-rbac-proxy/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.819534 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-psjhd_922916d6-2e57-4087-b5ae-24c6318f180a/kube-rbac-proxy/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.825827 4888 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-z2xkh_4651cf7c-a7f9-4137-9d3b-6a656746f373/manager/0.log" Dec 01 20:35:58 crc kubenswrapper[4888]: I1201 20:35:58.871883 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-psjhd_922916d6-2e57-4087-b5ae-24c6318f180a/manager/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.028028 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp_06a8d696-66ab-49ef-b858-2245cc6e0023/kube-rbac-proxy/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.116816 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp_06a8d696-66ab-49ef-b858-2245cc6e0023/manager/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.442140 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mwxl6_805e9225-e4df-4c8c-b543-29bec3f42292/registry-server/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.587548 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-d5b7696c6-7s4mq_bf82583b-b2be-41bb-af62-24e74142855f/operator/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.669359 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-v2mfg_ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02/kube-rbac-proxy/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.760833 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-v2mfg_ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02/manager/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.852559 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-d4tzz_7914a20a-7747-446f-a496-deecd734fb83/kube-rbac-proxy/0.log" Dec 01 20:35:59 crc kubenswrapper[4888]: I1201 20:35:59.999182 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-d4tzz_7914a20a-7747-446f-a496-deecd734fb83/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.079701 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ql8v9_e11be1d4-dbcb-4e6b-a97a-918425cb85ce/operator/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.211256 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-f6vhb_1f11c3a5-7276-48d5-9dc1-389ab98ffc11/kube-rbac-proxy/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.277346 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-f6vhb_1f11c3a5-7276-48d5-9dc1-389ab98ffc11/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.384261 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6c58f9c549-nbj7h_b01f5340-ffdb-4963-9e49-47dad6f75642/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.391095 4888 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-g79qf_e75f9e52-ceaa-463a-ba65-ed651715c4f4/kube-rbac-proxy/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.526132 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-cht8z_b50af81b-6773-46f1-916e-0346848ba65e/kube-rbac-proxy/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.541417 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-g79qf_e75f9e52-ceaa-463a-ba65-ed651715c4f4/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.700559 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-cht8z_b50af81b-6773-46f1-916e-0346848ba65e/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.702902 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-ntzlv_f94185be-1233-4c97-add2-b6e2fcd22827/manager/0.log" Dec 01 20:36:00 crc kubenswrapper[4888]: I1201 20:36:00.729640 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-ntzlv_f94185be-1233-4c97-add2-b6e2fcd22827/kube-rbac-proxy/0.log" Dec 01 20:36:08 crc kubenswrapper[4888]: I1201 20:36:08.452267 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:36:08 crc kubenswrapper[4888]: E1201 20:36:08.452968 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:36:19 crc kubenswrapper[4888]: I1201 20:36:19.221503 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-42k5p_50b1e183-9a9a-4daa-a769-78bc53d20c41/control-plane-machine-set-operator/0.log" Dec 01 20:36:19 crc kubenswrapper[4888]: I1201 20:36:19.397619 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2w272_60a42fba-5e64-4a68-a9a3-e29ff836d97f/kube-rbac-proxy/0.log" Dec 01 20:36:19 crc kubenswrapper[4888]: I1201 20:36:19.435102 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2w272_60a42fba-5e64-4a68-a9a3-e29ff836d97f/machine-api-operator/0.log" Dec 01 20:36:23 crc kubenswrapper[4888]: I1201 20:36:23.452053 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:36:23 crc kubenswrapper[4888]: E1201 20:36:23.453021 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" 
podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:36:32 crc kubenswrapper[4888]: I1201 20:36:32.502299 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-td6nx_68662703-dd1d-4a5f-8884-d79b491c4fe2/cert-manager-controller/0.log" Dec 01 20:36:32 crc kubenswrapper[4888]: I1201 20:36:32.671302 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-llsnr_5695a357-bd5b-42c3-952b-f2be7e800dce/cert-manager-cainjector/0.log" Dec 01 20:36:32 crc kubenswrapper[4888]: I1201 20:36:32.748585 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-6mj44_fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858/cert-manager-webhook/0.log" Dec 01 20:36:35 crc kubenswrapper[4888]: I1201 20:36:35.451446 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:36:35 crc kubenswrapper[4888]: E1201 20:36:35.453175 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.092241 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-jdzp6_e8b41a7b-e30b-40a3-9d94-89af1c9623b6/nmstate-console-plugin/0.log" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.348525 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-frhtc_a9567735-6e3f-46d7-aa56-837398be488b/nmstate-handler/0.log" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.392550 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qddf7_bc82e383-0b0c-4f71-84b0-8c1de3ba240a/kube-rbac-proxy/0.log" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.412202 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qddf7_bc82e383-0b0c-4f71-84b0-8c1de3ba240a/nmstate-metrics/0.log" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.528048 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-xjmvl_d4346417-1916-4764-949c-3f2a628501e1/nmstate-operator/0.log" Dec 01 20:36:45 crc kubenswrapper[4888]: I1201 20:36:45.644207 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-k2bjh_398ab1db-126e-4ea0-b429-a7563f68c127/nmstate-webhook/0.log" Dec 01 20:36:50 crc kubenswrapper[4888]: I1201 20:36:50.457827 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:36:50 crc kubenswrapper[4888]: E1201 20:36:50.458457 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 
01 20:36:59 crc kubenswrapper[4888]: I1201 20:36:59.864814 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5s49p_5234563a-ff0f-42ed-b8da-24b76dc29ebc/kube-rbac-proxy/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.043175 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5s49p_5234563a-ff0f-42ed-b8da-24b76dc29ebc/controller/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.100932 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-wlhmh_76b0bcdf-1744-4b10-8576-7bf114e2ec63/frr-k8s-webhook-server/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.229208 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.425706 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.482972 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.490366 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.494762 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.668101 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.676797 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.690695 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.741589 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.895048 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.934292 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.938852 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:37:00 crc kubenswrapper[4888]: I1201 20:37:00.968122 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/controller/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.162392 4888 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/kube-rbac-proxy/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.166090 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/frr-metrics/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.203212 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/kube-rbac-proxy-frr/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.355810 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/reloader/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.413992 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-86c8597757-vv42m_a9e96183-2604-4b4c-bc23-a48485783f33/manager/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.450977 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:37:01 crc kubenswrapper[4888]: E1201 20:37:01.451313 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.586678 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-75d75bc95b-g8xkf_9a88f138-a2b0-4826-8bba-dd3b7942d88b/webhook-server/0.log" Dec 01 20:37:01 crc kubenswrapper[4888]: I1201 20:37:01.830836 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9zffn_84659dfe-1cdd-43b7-bb53-8adbf22e4c20/kube-rbac-proxy/0.log" Dec 01 20:37:02 crc kubenswrapper[4888]: I1201 20:37:02.272675 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9zffn_84659dfe-1cdd-43b7-bb53-8adbf22e4c20/speaker/0.log" Dec 01 20:37:02 crc kubenswrapper[4888]: I1201 20:37:02.490420 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/frr/0.log" Dec 01 20:37:13 crc kubenswrapper[4888]: I1201 20:37:13.844976 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.236254 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.257011 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.271773 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.457241 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.474551 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.497751 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/extract/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.645266 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.838333 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.841032 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:37:14 crc kubenswrapper[4888]: I1201 20:37:14.864296 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.026727 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.042519 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/extract/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.089711 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.205003 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.389083 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.389591 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.410198 
4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.579523 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.638511 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:37:15 crc kubenswrapper[4888]: I1201 20:37:15.818310 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.009150 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/registry-server/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.046461 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.068643 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.092494 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.252801 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.257327 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.450917 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:37:16 crc kubenswrapper[4888]: E1201 20:37:16.451267 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.469930 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/2.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.545625 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/1.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.701617 4888 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/registry-server/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.737928 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.941701 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.943888 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:37:16 crc kubenswrapper[4888]: I1201 20:37:16.954345 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.101757 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.128670 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.271109 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/registry-server/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.330805 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.491706 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.503741 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.536314 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.678674 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:37:17 crc kubenswrapper[4888]: I1201 20:37:17.727342 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:37:18 crc kubenswrapper[4888]: I1201 20:37:18.157705 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/registry-server/0.log" Dec 01 20:37:27 crc kubenswrapper[4888]: I1201 20:37:27.450890 4888 scope.go:117] "RemoveContainer" 
containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:37:27 crc kubenswrapper[4888]: E1201 20:37:27.451612 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:37:38 crc kubenswrapper[4888]: I1201 20:37:38.452030 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:37:38 crc kubenswrapper[4888]: E1201 20:37:38.452794 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:37:50 crc kubenswrapper[4888]: E1201 20:37:50.190364 4888 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.163:35024->38.102.83.163:40487: write tcp 38.102.83.163:35024->38.102.83.163:40487: write: broken pipe Dec 01 20:37:50 crc kubenswrapper[4888]: I1201 20:37:50.458078 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:37:51 crc kubenswrapper[4888]: I1201 20:37:51.297722 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791"} Dec 01 20:38:33 crc kubenswrapper[4888]: I1201 20:38:33.860571 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:33 crc kubenswrapper[4888]: E1201 20:38:33.862265 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6201aca-3a7c-4d66-bbdc-21a5cb451750" containerName="container-00" Dec 01 20:38:33 crc kubenswrapper[4888]: I1201 20:38:33.862284 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6201aca-3a7c-4d66-bbdc-21a5cb451750" containerName="container-00" Dec 01 20:38:33 crc kubenswrapper[4888]: I1201 20:38:33.880901 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6201aca-3a7c-4d66-bbdc-21a5cb451750" containerName="container-00" Dec 01 20:38:33 crc kubenswrapper[4888]: I1201 20:38:33.905227 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:33 crc kubenswrapper[4888]: I1201 20:38:33.909260 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.076156 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89gq8\" (UniqueName: \"kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.076323 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.076386 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.177734 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89gq8\" (UniqueName: \"kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.178223 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.178676 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.178735 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.179014 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.206368 4888 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-89gq8\" (UniqueName: \"kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8\") pod \"community-operators-vllfv\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.242079 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:34 crc kubenswrapper[4888]: I1201 20:38:34.772406 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:34 crc kubenswrapper[4888]: W1201 20:38:34.777808 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8408fe2e_aba2_4db1_898d_d31888b31260.slice/crio-dc99ce8e8def422605b3b04a948662cfedd722ad1a160155ab5dc6e553e021e9 WatchSource:0}: Error finding container dc99ce8e8def422605b3b04a948662cfedd722ad1a160155ab5dc6e553e021e9: Status 404 returned error can't find the container with id dc99ce8e8def422605b3b04a948662cfedd722ad1a160155ab5dc6e553e021e9 Dec 01 20:38:35 crc kubenswrapper[4888]: I1201 20:38:35.681455 4888 generic.go:334] "Generic (PLEG): container finished" podID="8408fe2e-aba2-4db1-898d-d31888b31260" containerID="066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e" exitCode=0 Dec 01 20:38:35 crc kubenswrapper[4888]: I1201 20:38:35.682821 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerDied","Data":"066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e"} Dec 01 20:38:35 crc kubenswrapper[4888]: I1201 20:38:35.682936 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerStarted","Data":"dc99ce8e8def422605b3b04a948662cfedd722ad1a160155ab5dc6e553e021e9"} Dec 01 20:38:35 crc kubenswrapper[4888]: I1201 20:38:35.685615 4888 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 20:38:36 crc kubenswrapper[4888]: I1201 20:38:36.696107 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerStarted","Data":"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb"} Dec 01 20:38:37 crc kubenswrapper[4888]: I1201 20:38:37.704854 4888 generic.go:334] "Generic (PLEG): container finished" podID="8408fe2e-aba2-4db1-898d-d31888b31260" containerID="36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb" exitCode=0 Dec 01 20:38:37 crc kubenswrapper[4888]: I1201 20:38:37.705003 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerDied","Data":"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb"} Dec 01 20:38:38 crc kubenswrapper[4888]: I1201 20:38:38.730560 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerStarted","Data":"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3"} Dec 01 20:38:38 crc kubenswrapper[4888]: I1201 
20:38:38.750912 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vllfv" podStartSLOduration=2.941735833 podStartE2EDuration="5.750892745s" podCreationTimestamp="2025-12-01 20:38:33 +0000 UTC" firstStartedPulling="2025-12-01 20:38:35.685359833 +0000 UTC m=+3915.556389757" lastFinishedPulling="2025-12-01 20:38:38.494516755 +0000 UTC m=+3918.365546669" observedRunningTime="2025-12-01 20:38:38.745867393 +0000 UTC m=+3918.616897307" watchObservedRunningTime="2025-12-01 20:38:38.750892745 +0000 UTC m=+3918.621922659" Dec 01 20:38:44 crc kubenswrapper[4888]: I1201 20:38:44.242834 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:44 crc kubenswrapper[4888]: I1201 20:38:44.243447 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:44 crc kubenswrapper[4888]: I1201 20:38:44.292109 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:44 crc kubenswrapper[4888]: I1201 20:38:44.863763 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:44 crc kubenswrapper[4888]: I1201 20:38:44.944940 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:46 crc kubenswrapper[4888]: I1201 20:38:46.799551 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vllfv" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="registry-server" containerID="cri-o://e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3" gracePeriod=2 Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.377328 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.554482 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities\") pod \"8408fe2e-aba2-4db1-898d-d31888b31260\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.554996 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89gq8\" (UniqueName: \"kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8\") pod \"8408fe2e-aba2-4db1-898d-d31888b31260\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.555026 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content\") pod \"8408fe2e-aba2-4db1-898d-d31888b31260\" (UID: \"8408fe2e-aba2-4db1-898d-d31888b31260\") " Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.555512 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities" (OuterVolumeSpecName: "utilities") pod "8408fe2e-aba2-4db1-898d-d31888b31260" (UID: "8408fe2e-aba2-4db1-898d-d31888b31260"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.657020 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.810149 4888 generic.go:334] "Generic (PLEG): container finished" podID="8408fe2e-aba2-4db1-898d-d31888b31260" containerID="e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3" exitCode=0 Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.810212 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerDied","Data":"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3"} Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.810246 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vllfv" event={"ID":"8408fe2e-aba2-4db1-898d-d31888b31260","Type":"ContainerDied","Data":"dc99ce8e8def422605b3b04a948662cfedd722ad1a160155ab5dc6e553e021e9"} Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.810280 4888 scope.go:117] "RemoveContainer" containerID="e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.811095 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vllfv" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.830133 4888 scope.go:117] "RemoveContainer" containerID="36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb" Dec 01 20:38:47 crc kubenswrapper[4888]: I1201 20:38:47.996463 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8" (OuterVolumeSpecName: "kube-api-access-89gq8") pod "8408fe2e-aba2-4db1-898d-d31888b31260" (UID: "8408fe2e-aba2-4db1-898d-d31888b31260"). InnerVolumeSpecName "kube-api-access-89gq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.008489 4888 scope.go:117] "RemoveContainer" containerID="066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.063507 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89gq8\" (UniqueName: \"kubernetes.io/projected/8408fe2e-aba2-4db1-898d-d31888b31260-kube-api-access-89gq8\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.108383 4888 scope.go:117] "RemoveContainer" containerID="e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3" Dec 01 20:38:48 crc kubenswrapper[4888]: E1201 20:38:48.111475 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3\": container with ID starting with e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3 not found: ID does not exist" containerID="e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.111518 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3"} err="failed to get container status \"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3\": rpc error: code = NotFound desc = could not find container \"e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3\": container with ID starting with e93bf9af06b7536edee7c636bbf365fa3ea07fb284bdf95813197d8237e628e3 not found: ID does not exist" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.111544 4888 scope.go:117] "RemoveContainer" containerID="36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb" Dec 01 20:38:48 crc kubenswrapper[4888]: E1201 20:38:48.111904 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb\": container with ID starting with 36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb not found: ID does not exist" containerID="36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.111977 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb"} err="failed to get container status \"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb\": rpc error: code = NotFound desc = could not find container \"36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb\": container with ID starting with 36e5f2181be53864fb45f160d92918dc11814a618dee696223e0e1694f26b0eb not found: ID does not exist" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.112002 4888 scope.go:117] "RemoveContainer" containerID="066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e" Dec 01 20:38:48 crc kubenswrapper[4888]: E1201 20:38:48.112256 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e\": container with ID starting with 066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e not found: ID does not 
exist" containerID="066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.112282 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e"} err="failed to get container status \"066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e\": rpc error: code = NotFound desc = could not find container \"066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e\": container with ID starting with 066f9154e7ed03be60b20fd971b600d520f517b175c9560e01d428bb40a9d91e not found: ID does not exist" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.325117 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8408fe2e-aba2-4db1-898d-d31888b31260" (UID: "8408fe2e-aba2-4db1-898d-d31888b31260"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.368162 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8408fe2e-aba2-4db1-898d-d31888b31260-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.443494 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:48 crc kubenswrapper[4888]: I1201 20:38:48.468275 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vllfv"] Dec 01 20:38:50 crc kubenswrapper[4888]: I1201 20:38:50.487505 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" path="/var/lib/kubelet/pods/8408fe2e-aba2-4db1-898d-d31888b31260/volumes" Dec 01 20:39:00 crc kubenswrapper[4888]: I1201 20:39:00.916634 4888 generic.go:334] "Generic (PLEG): container finished" podID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerID="880c299ae69b61d21fe0a9493675cb35e586d54d462afb77cb8081a15b55e59e" exitCode=0 Dec 01 20:39:00 crc kubenswrapper[4888]: I1201 20:39:00.916715 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9xms5/must-gather-6sjdk" event={"ID":"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5","Type":"ContainerDied","Data":"880c299ae69b61d21fe0a9493675cb35e586d54d462afb77cb8081a15b55e59e"} Dec 01 20:39:00 crc kubenswrapper[4888]: I1201 20:39:00.917918 4888 scope.go:117] "RemoveContainer" containerID="880c299ae69b61d21fe0a9493675cb35e586d54d462afb77cb8081a15b55e59e" Dec 01 20:39:01 crc kubenswrapper[4888]: I1201 20:39:01.501837 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xms5_must-gather-6sjdk_922e5a7f-81ce-45f2-a6ed-52d1f48f50f5/gather/0.log" Dec 01 20:39:08 crc kubenswrapper[4888]: I1201 20:39:08.542201 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9xms5/must-gather-6sjdk"] Dec 01 20:39:08 crc kubenswrapper[4888]: I1201 20:39:08.543142 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9xms5/must-gather-6sjdk" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="copy" containerID="cri-o://406770a9b843388fd9373eb52f3f456dee5932508c31da7335ae734039f94e2f" gracePeriod=2 Dec 01 20:39:08 crc kubenswrapper[4888]: I1201 
20:39:08.581015 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9xms5/must-gather-6sjdk"] Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.011417 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xms5_must-gather-6sjdk_922e5a7f-81ce-45f2-a6ed-52d1f48f50f5/copy/0.log" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.012071 4888 generic.go:334] "Generic (PLEG): container finished" podID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerID="406770a9b843388fd9373eb52f3f456dee5932508c31da7335ae734039f94e2f" exitCode=143 Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.012117 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f140546bd29fb0649cfdd80ee30a9c29960415574d34740bd167ae21a705d86" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.063390 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9xms5_must-gather-6sjdk_922e5a7f-81ce-45f2-a6ed-52d1f48f50f5/copy/0.log" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.063775 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.083233 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output\") pod \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.083314 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcs7q\" (UniqueName: \"kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q\") pod \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\" (UID: \"922e5a7f-81ce-45f2-a6ed-52d1f48f50f5\") " Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.090322 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q" (OuterVolumeSpecName: "kube-api-access-kcs7q") pod "922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" (UID: "922e5a7f-81ce-45f2-a6ed-52d1f48f50f5"). InnerVolumeSpecName "kube-api-access-kcs7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.185617 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcs7q\" (UniqueName: \"kubernetes.io/projected/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-kube-api-access-kcs7q\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.239399 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" (UID: "922e5a7f-81ce-45f2-a6ed-52d1f48f50f5"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:39:09 crc kubenswrapper[4888]: I1201 20:39:09.287513 4888 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 20:39:10 crc kubenswrapper[4888]: I1201 20:39:10.020122 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9xms5/must-gather-6sjdk" Dec 01 20:39:10 crc kubenswrapper[4888]: I1201 20:39:10.462367 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" path="/var/lib/kubelet/pods/922e5a7f-81ce-45f2-a6ed-52d1f48f50f5/volumes" Dec 01 20:39:50 crc kubenswrapper[4888]: I1201 20:39:50.038355 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:39:50 crc kubenswrapper[4888]: I1201 20:39:50.038858 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:40:06 crc kubenswrapper[4888]: I1201 20:40:06.598454 4888 scope.go:117] "RemoveContainer" containerID="406770a9b843388fd9373eb52f3f456dee5932508c31da7335ae734039f94e2f" Dec 01 20:40:06 crc kubenswrapper[4888]: I1201 20:40:06.618102 4888 scope.go:117] "RemoveContainer" containerID="880c299ae69b61d21fe0a9493675cb35e586d54d462afb77cb8081a15b55e59e" Dec 01 20:40:20 crc kubenswrapper[4888]: I1201 20:40:20.038427 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:40:20 crc kubenswrapper[4888]: I1201 20:40:20.039008 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.038609 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.039114 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.039168 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.039925 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.039984 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791" gracePeriod=600 Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.929640 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791" exitCode=0 Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.929729 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791"} Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.930163 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"} Dec 01 20:40:50 crc kubenswrapper[4888]: I1201 20:40:50.930193 4888 scope.go:117] "RemoveContainer" containerID="6282185b49ee7844b38a72fceb3d0ddde1ef58ef34cfc20307c7dc17b25c5c03" Dec 01 20:41:06 crc kubenswrapper[4888]: I1201 20:41:06.712618 4888 scope.go:117] "RemoveContainer" containerID="5286d261e14e7a88815f53e00c6e28b14279986791865c27a1b954d5371b6c6f" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.098959 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:15 crc kubenswrapper[4888]: E1201 20:41:15.099968 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="extract-content" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.099984 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="extract-content" Dec 01 20:41:15 crc kubenswrapper[4888]: E1201 20:41:15.100009 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="copy" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100016 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="copy" Dec 01 20:41:15 crc kubenswrapper[4888]: E1201 20:41:15.100038 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="registry-server" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100046 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="registry-server" Dec 01 20:41:15 crc 
kubenswrapper[4888]: E1201 20:41:15.100076 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="gather" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100084 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="gather" Dec 01 20:41:15 crc kubenswrapper[4888]: E1201 20:41:15.100099 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="extract-utilities" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100107 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="extract-utilities" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100342 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="copy" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100373 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="922e5a7f-81ce-45f2-a6ed-52d1f48f50f5" containerName="gather" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.100384 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="8408fe2e-aba2-4db1-898d-d31888b31260" containerName="registry-server" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.102116 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.117533 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.196978 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.197075 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.197128 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szdbm\" (UniqueName: \"kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.298654 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.298739 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szdbm\" (UniqueName: 
\"kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.298818 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.299489 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.299482 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.320642 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szdbm\" (UniqueName: \"kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm\") pod \"redhat-marketplace-j2452\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:15 crc kubenswrapper[4888]: I1201 20:41:15.473129 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:16 crc kubenswrapper[4888]: I1201 20:41:16.069330 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:16 crc kubenswrapper[4888]: I1201 20:41:16.195214 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerStarted","Data":"b654cb346ae0d810f70087048bd890a4014d72190420dc1c64932d964e921700"} Dec 01 20:41:17 crc kubenswrapper[4888]: I1201 20:41:17.205852 4888 generic.go:334] "Generic (PLEG): container finished" podID="39d54fb7-f775-44cb-9f8d-80093af71821" containerID="aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa" exitCode=0 Dec 01 20:41:17 crc kubenswrapper[4888]: I1201 20:41:17.205968 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerDied","Data":"aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa"} Dec 01 20:41:18 crc kubenswrapper[4888]: I1201 20:41:18.218088 4888 generic.go:334] "Generic (PLEG): container finished" podID="39d54fb7-f775-44cb-9f8d-80093af71821" containerID="af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72" exitCode=0 Dec 01 20:41:18 crc kubenswrapper[4888]: I1201 20:41:18.218368 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerDied","Data":"af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72"} Dec 01 20:41:20 crc kubenswrapper[4888]: I1201 20:41:20.284706 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerStarted","Data":"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c"} Dec 01 20:41:20 crc kubenswrapper[4888]: I1201 20:41:20.305127 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j2452" podStartSLOduration=3.6897816519999997 podStartE2EDuration="5.305110687s" podCreationTimestamp="2025-12-01 20:41:15 +0000 UTC" firstStartedPulling="2025-12-01 20:41:17.207590321 +0000 UTC m=+4077.078620235" lastFinishedPulling="2025-12-01 20:41:18.822919356 +0000 UTC m=+4078.693949270" observedRunningTime="2025-12-01 20:41:20.302174234 +0000 UTC m=+4080.173204148" watchObservedRunningTime="2025-12-01 20:41:20.305110687 +0000 UTC m=+4080.176140591" Dec 01 20:41:25 crc kubenswrapper[4888]: I1201 20:41:25.473538 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:25 crc kubenswrapper[4888]: I1201 20:41:25.474087 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:25 crc kubenswrapper[4888]: I1201 20:41:25.521803 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:26 crc kubenswrapper[4888]: I1201 20:41:26.388655 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:26 crc kubenswrapper[4888]: I1201 20:41:26.433457 4888 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:28 crc kubenswrapper[4888]: I1201 20:41:28.358117 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j2452" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="registry-server" containerID="cri-o://38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c" gracePeriod=2 Dec 01 20:41:28 crc kubenswrapper[4888]: I1201 20:41:28.891529 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.082709 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities\") pod \"39d54fb7-f775-44cb-9f8d-80093af71821\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.082766 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szdbm\" (UniqueName: \"kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm\") pod \"39d54fb7-f775-44cb-9f8d-80093af71821\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.082818 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content\") pod \"39d54fb7-f775-44cb-9f8d-80093af71821\" (UID: \"39d54fb7-f775-44cb-9f8d-80093af71821\") " Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.083668 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities" (OuterVolumeSpecName: "utilities") pod "39d54fb7-f775-44cb-9f8d-80093af71821" (UID: "39d54fb7-f775-44cb-9f8d-80093af71821"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.088911 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm" (OuterVolumeSpecName: "kube-api-access-szdbm") pod "39d54fb7-f775-44cb-9f8d-80093af71821" (UID: "39d54fb7-f775-44cb-9f8d-80093af71821"). InnerVolumeSpecName "kube-api-access-szdbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.102894 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39d54fb7-f775-44cb-9f8d-80093af71821" (UID: "39d54fb7-f775-44cb-9f8d-80093af71821"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.185316 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.185617 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szdbm\" (UniqueName: \"kubernetes.io/projected/39d54fb7-f775-44cb-9f8d-80093af71821-kube-api-access-szdbm\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.185633 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39d54fb7-f775-44cb-9f8d-80093af71821-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.369442 4888 generic.go:334] "Generic (PLEG): container finished" podID="39d54fb7-f775-44cb-9f8d-80093af71821" containerID="38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c" exitCode=0 Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.369510 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerDied","Data":"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c"} Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.369565 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j2452" event={"ID":"39d54fb7-f775-44cb-9f8d-80093af71821","Type":"ContainerDied","Data":"b654cb346ae0d810f70087048bd890a4014d72190420dc1c64932d964e921700"} Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.369525 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j2452" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.369615 4888 scope.go:117] "RemoveContainer" containerID="38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.396840 4888 scope.go:117] "RemoveContainer" containerID="af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.408075 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.417824 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j2452"] Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.428436 4888 scope.go:117] "RemoveContainer" containerID="aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.446839 4888 scope.go:117] "RemoveContainer" containerID="38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c" Dec 01 20:41:29 crc kubenswrapper[4888]: E1201 20:41:29.447238 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c\": container with ID starting with 38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c not found: ID does not exist" containerID="38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.447286 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c"} err="failed to get container status \"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c\": rpc error: code = NotFound desc = could not find container \"38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c\": container with ID starting with 38b6886a31d4860da94e944d14332a5fc86e1e2d5413d928d07cfac359f47b6c not found: ID does not exist" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.447315 4888 scope.go:117] "RemoveContainer" containerID="af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72" Dec 01 20:41:29 crc kubenswrapper[4888]: E1201 20:41:29.447751 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72\": container with ID starting with af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72 not found: ID does not exist" containerID="af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.447784 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72"} err="failed to get container status \"af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72\": rpc error: code = NotFound desc = could not find container \"af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72\": container with ID starting with af9e4381b9ce69a3a5044bcf2ed5f95c518f949819aaf94bb4f98fff18705d72 not found: ID does not exist" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.447805 4888 scope.go:117] "RemoveContainer" 
containerID="aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa" Dec 01 20:41:29 crc kubenswrapper[4888]: E1201 20:41:29.448053 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa\": container with ID starting with aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa not found: ID does not exist" containerID="aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa" Dec 01 20:41:29 crc kubenswrapper[4888]: I1201 20:41:29.448077 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa"} err="failed to get container status \"aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa\": rpc error: code = NotFound desc = could not find container \"aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa\": container with ID starting with aa163ae14b25a19f166bda0df967ca08678933ca0c9c5e12e7edd182174942fa not found: ID does not exist" Dec 01 20:41:30 crc kubenswrapper[4888]: I1201 20:41:30.463233 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" path="/var/lib/kubelet/pods/39d54fb7-f775-44cb-9f8d-80093af71821/volumes" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.574791 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wc2rz/must-gather-c9nvr"] Dec 01 20:41:49 crc kubenswrapper[4888]: E1201 20:41:49.577145 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="extract-content" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.577295 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="extract-content" Dec 01 20:41:49 crc kubenswrapper[4888]: E1201 20:41:49.577407 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="registry-server" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.577489 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="registry-server" Dec 01 20:41:49 crc kubenswrapper[4888]: E1201 20:41:49.577588 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="extract-utilities" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.577668 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="extract-utilities" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.579456 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="39d54fb7-f775-44cb-9f8d-80093af71821" containerName="registry-server" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.580888 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.588615 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wc2rz"/"openshift-service-ca.crt" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.589974 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wc2rz/must-gather-c9nvr"] Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.591643 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wc2rz"/"kube-root-ca.crt" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.599158 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wc2rz"/"default-dockercfg-94gpw" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.672380 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzc44\" (UniqueName: \"kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.672550 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.773799 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.774118 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzc44\" (UniqueName: \"kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.774302 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.799089 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzc44\" (UniqueName: \"kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44\") pod \"must-gather-c9nvr\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") " pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:49 crc kubenswrapper[4888]: I1201 20:41:49.903955 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:41:50 crc kubenswrapper[4888]: I1201 20:41:50.409268 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wc2rz/must-gather-c9nvr"] Dec 01 20:41:50 crc kubenswrapper[4888]: I1201 20:41:50.597853 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" event={"ID":"1c9cf28e-18b4-4606-bfc1-51c5093777b5","Type":"ContainerStarted","Data":"511a4913b2c8c67471d226605f3bb40cd871caf920ddbe34b6913939d3180061"} Dec 01 20:41:51 crc kubenswrapper[4888]: I1201 20:41:51.609562 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" event={"ID":"1c9cf28e-18b4-4606-bfc1-51c5093777b5","Type":"ContainerStarted","Data":"9d9858a43bcae0a0b1293d112698729efc90ffb291c8db61e48828e8b3e44203"} Dec 01 20:41:51 crc kubenswrapper[4888]: I1201 20:41:51.609794 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" event={"ID":"1c9cf28e-18b4-4606-bfc1-51c5093777b5","Type":"ContainerStarted","Data":"2f55238413978a0d4c3bd844be2eac27a0b86dc30264ec073c3f0046cf972b58"} Dec 01 20:41:51 crc kubenswrapper[4888]: I1201 20:41:51.635619 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" podStartSLOduration=2.635599497 podStartE2EDuration="2.635599497s" podCreationTimestamp="2025-12-01 20:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:41:51.632227542 +0000 UTC m=+4111.503257466" watchObservedRunningTime="2025-12-01 20:41:51.635599497 +0000 UTC m=+4111.506629411" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.604905 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-fv5bb"] Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.606855 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.664303 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4pgn\" (UniqueName: \"kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.664377 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.766634 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4pgn\" (UniqueName: \"kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.766762 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.766983 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.813238 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4pgn\" (UniqueName: \"kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn\") pod \"crc-debug-fv5bb\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: I1201 20:41:54.925475 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:41:54 crc kubenswrapper[4888]: W1201 20:41:54.971578 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7102a233_4415_4882_959a_0115ed512ea7.slice/crio-cc30bcca5c9e2c3305229d01fe45d7f5c6070b00cc72c7ef83ac904c1e9ca534 WatchSource:0}: Error finding container cc30bcca5c9e2c3305229d01fe45d7f5c6070b00cc72c7ef83ac904c1e9ca534: Status 404 returned error can't find the container with id cc30bcca5c9e2c3305229d01fe45d7f5c6070b00cc72c7ef83ac904c1e9ca534 Dec 01 20:41:55 crc kubenswrapper[4888]: I1201 20:41:55.641333 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" event={"ID":"7102a233-4415-4882-959a-0115ed512ea7","Type":"ContainerStarted","Data":"c21658ab3e1bfcf97c36899fb48a9149382972905e97bb01cf7d185ea130124f"} Dec 01 20:41:55 crc kubenswrapper[4888]: I1201 20:41:55.641843 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" event={"ID":"7102a233-4415-4882-959a-0115ed512ea7","Type":"ContainerStarted","Data":"cc30bcca5c9e2c3305229d01fe45d7f5c6070b00cc72c7ef83ac904c1e9ca534"} Dec 01 20:41:55 crc kubenswrapper[4888]: I1201 20:41:55.657474 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" podStartSLOduration=1.657456153 podStartE2EDuration="1.657456153s" podCreationTimestamp="2025-12-01 20:41:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:41:55.654825679 +0000 UTC m=+4115.525855603" watchObservedRunningTime="2025-12-01 20:41:55.657456153 +0000 UTC m=+4115.528486067" Dec 01 20:42:28 crc kubenswrapper[4888]: I1201 20:42:28.923883 4888 generic.go:334] "Generic (PLEG): container finished" podID="7102a233-4415-4882-959a-0115ed512ea7" containerID="c21658ab3e1bfcf97c36899fb48a9149382972905e97bb01cf7d185ea130124f" exitCode=0 Dec 01 20:42:28 crc kubenswrapper[4888]: I1201 20:42:28.923955 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" event={"ID":"7102a233-4415-4882-959a-0115ed512ea7","Type":"ContainerDied","Data":"c21658ab3e1bfcf97c36899fb48a9149382972905e97bb01cf7d185ea130124f"} Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.033655 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.067983 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-fv5bb"] Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.078089 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-fv5bb"] Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.119036 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4pgn\" (UniqueName: \"kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn\") pod \"7102a233-4415-4882-959a-0115ed512ea7\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.119271 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host\") pod \"7102a233-4415-4882-959a-0115ed512ea7\" (UID: \"7102a233-4415-4882-959a-0115ed512ea7\") " Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.119624 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host" (OuterVolumeSpecName: "host") pod "7102a233-4415-4882-959a-0115ed512ea7" (UID: "7102a233-4415-4882-959a-0115ed512ea7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.124473 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn" (OuterVolumeSpecName: "kube-api-access-x4pgn") pod "7102a233-4415-4882-959a-0115ed512ea7" (UID: "7102a233-4415-4882-959a-0115ed512ea7"). InnerVolumeSpecName "kube-api-access-x4pgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.221316 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7102a233-4415-4882-959a-0115ed512ea7-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.221351 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4pgn\" (UniqueName: \"kubernetes.io/projected/7102a233-4415-4882-959a-0115ed512ea7-kube-api-access-x4pgn\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.463703 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7102a233-4415-4882-959a-0115ed512ea7" path="/var/lib/kubelet/pods/7102a233-4415-4882-959a-0115ed512ea7/volumes" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.948807 4888 scope.go:117] "RemoveContainer" containerID="c21658ab3e1bfcf97c36899fb48a9149382972905e97bb01cf7d185ea130124f" Dec 01 20:42:30 crc kubenswrapper[4888]: I1201 20:42:30.948823 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-fv5bb" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.249413 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-hslhg"] Dec 01 20:42:31 crc kubenswrapper[4888]: E1201 20:42:31.249849 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7102a233-4415-4882-959a-0115ed512ea7" containerName="container-00" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.249863 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7102a233-4415-4882-959a-0115ed512ea7" containerName="container-00" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.250039 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7102a233-4415-4882-959a-0115ed512ea7" containerName="container-00" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.250673 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.445058 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pjd8\" (UniqueName: \"kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.445752 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.558362 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.559293 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pjd8\" (UniqueName: \"kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.562433 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.590982 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pjd8\" (UniqueName: \"kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8\") pod \"crc-debug-hslhg\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.869417 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:31 crc kubenswrapper[4888]: I1201 20:42:31.960619 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" event={"ID":"108366d5-d5b5-47c0-9d2f-680aeca8baa4","Type":"ContainerStarted","Data":"71a58ad4ee17645dbfbca33df898aef991281bcfaad146e63523bde47d288d06"} Dec 01 20:42:32 crc kubenswrapper[4888]: I1201 20:42:32.970547 4888 generic.go:334] "Generic (PLEG): container finished" podID="108366d5-d5b5-47c0-9d2f-680aeca8baa4" containerID="181f835f5736df8673abe4df8d34c8508ab7f3ba29a0c5cca5af47f0716a2d0c" exitCode=0 Dec 01 20:42:32 crc kubenswrapper[4888]: I1201 20:42:32.970598 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" event={"ID":"108366d5-d5b5-47c0-9d2f-680aeca8baa4","Type":"ContainerDied","Data":"181f835f5736df8673abe4df8d34c8508ab7f3ba29a0c5cca5af47f0716a2d0c"} Dec 01 20:42:33 crc kubenswrapper[4888]: I1201 20:42:33.469079 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-hslhg"] Dec 01 20:42:33 crc kubenswrapper[4888]: I1201 20:42:33.479056 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-hslhg"] Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.096541 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.211568 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host\") pod \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.211672 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host" (OuterVolumeSpecName: "host") pod "108366d5-d5b5-47c0-9d2f-680aeca8baa4" (UID: "108366d5-d5b5-47c0-9d2f-680aeca8baa4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.211742 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pjd8\" (UniqueName: \"kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8\") pod \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\" (UID: \"108366d5-d5b5-47c0-9d2f-680aeca8baa4\") " Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.212304 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/108366d5-d5b5-47c0-9d2f-680aeca8baa4-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.222391 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8" (OuterVolumeSpecName: "kube-api-access-4pjd8") pod "108366d5-d5b5-47c0-9d2f-680aeca8baa4" (UID: "108366d5-d5b5-47c0-9d2f-680aeca8baa4"). InnerVolumeSpecName "kube-api-access-4pjd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.314051 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pjd8\" (UniqueName: \"kubernetes.io/projected/108366d5-d5b5-47c0-9d2f-680aeca8baa4-kube-api-access-4pjd8\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.460245 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="108366d5-d5b5-47c0-9d2f-680aeca8baa4" path="/var/lib/kubelet/pods/108366d5-d5b5-47c0-9d2f-680aeca8baa4/volumes" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.653855 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-68xgv"] Dec 01 20:42:34 crc kubenswrapper[4888]: E1201 20:42:34.654331 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="108366d5-d5b5-47c0-9d2f-680aeca8baa4" containerName="container-00" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.654352 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="108366d5-d5b5-47c0-9d2f-680aeca8baa4" containerName="container-00" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.654530 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="108366d5-d5b5-47c0-9d2f-680aeca8baa4" containerName="container-00" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.655167 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.822502 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.822819 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtn9l\" (UniqueName: \"kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.924963 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.925043 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtn9l\" (UniqueName: \"kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.925625 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.949769 4888 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mtn9l\" (UniqueName: \"kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l\") pod \"crc-debug-68xgv\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.975761 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.991760 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-hslhg" Dec 01 20:42:34 crc kubenswrapper[4888]: I1201 20:42:34.991762 4888 scope.go:117] "RemoveContainer" containerID="181f835f5736df8673abe4df8d34c8508ab7f3ba29a0c5cca5af47f0716a2d0c" Dec 01 20:42:36 crc kubenswrapper[4888]: I1201 20:42:36.000542 4888 generic.go:334] "Generic (PLEG): container finished" podID="6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" containerID="e3546c0f90559523417a5d0ae13ba267a77408cd9e02acf326ea174f156068fa" exitCode=0 Dec 01 20:42:36 crc kubenswrapper[4888]: I1201 20:42:36.000622 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" event={"ID":"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7","Type":"ContainerDied","Data":"e3546c0f90559523417a5d0ae13ba267a77408cd9e02acf326ea174f156068fa"} Dec 01 20:42:36 crc kubenswrapper[4888]: I1201 20:42:36.000895 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" event={"ID":"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7","Type":"ContainerStarted","Data":"be18c65431479b790295591bf2429912990df2387aeb751aba81d2007742493a"} Dec 01 20:42:36 crc kubenswrapper[4888]: I1201 20:42:36.034807 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-68xgv"] Dec 01 20:42:36 crc kubenswrapper[4888]: I1201 20:42:36.043422 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wc2rz/crc-debug-68xgv"] Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.120165 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.263518 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtn9l\" (UniqueName: \"kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l\") pod \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.264054 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host\") pod \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\" (UID: \"6e914ef5-bc36-4e36-bd4f-adca03d8f2d7\") " Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.264117 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host" (OuterVolumeSpecName: "host") pod "6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" (UID: "6e914ef5-bc36-4e36-bd4f-adca03d8f2d7"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.264980 4888 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-host\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.268591 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l" (OuterVolumeSpecName: "kube-api-access-mtn9l") pod "6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" (UID: "6e914ef5-bc36-4e36-bd4f-adca03d8f2d7"). InnerVolumeSpecName "kube-api-access-mtn9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:42:37 crc kubenswrapper[4888]: I1201 20:42:37.366817 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtn9l\" (UniqueName: \"kubernetes.io/projected/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7-kube-api-access-mtn9l\") on node \"crc\" DevicePath \"\"" Dec 01 20:42:38 crc kubenswrapper[4888]: I1201 20:42:38.019899 4888 scope.go:117] "RemoveContainer" containerID="e3546c0f90559523417a5d0ae13ba267a77408cd9e02acf326ea174f156068fa" Dec 01 20:42:38 crc kubenswrapper[4888]: I1201 20:42:38.019944 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/crc-debug-68xgv" Dec 01 20:42:38 crc kubenswrapper[4888]: I1201 20:42:38.466527 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" path="/var/lib/kubelet/pods/6e914ef5-bc36-4e36-bd4f-adca03d8f2d7/volumes" Dec 01 20:42:50 crc kubenswrapper[4888]: I1201 20:42:50.037298 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:42:50 crc kubenswrapper[4888]: I1201 20:42:50.037835 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:43:00 crc kubenswrapper[4888]: I1201 20:43:00.697988 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75669cfdf8-fmlxq_a887275f-f805-4f46-962b-d54149803ab3/barbican-api/0.log" Dec 01 20:43:00 crc kubenswrapper[4888]: I1201 20:43:00.827639 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-75669cfdf8-fmlxq_a887275f-f805-4f46-962b-d54149803ab3/barbican-api-log/0.log" Dec 01 20:43:00 crc kubenswrapper[4888]: I1201 20:43:00.904537 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8db6f5c5d-bb5x6_af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f/barbican-keystone-listener/0.log" Dec 01 20:43:00 crc kubenswrapper[4888]: I1201 20:43:00.957413 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-8db6f5c5d-bb5x6_af0454e8-5ae1-4ed7-b4f3-feb2d4c1703f/barbican-keystone-listener-log/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.236776 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-79c4fd898c-nlfgq_13de5904-4edb-417c-aa16-c1690ba7a828/barbican-worker/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.263424 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-79c4fd898c-nlfgq_13de5904-4edb-417c-aa16-c1690ba7a828/barbican-worker-log/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.427919 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-cgpcx_04974f6b-2545-433f-907d-5f97024057d4/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.526663 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/ceilometer-central-agent/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.580591 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/ceilometer-notification-agent/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.638101 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/proxy-httpd/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.690888 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2be11eda-a57e-402c-a39a-f72af50268ef/sg-core/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.796827 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b0b6a93-78d0-43c8-b6fb-059da98cf4bd/cinder-api-log/0.log" Dec 01 20:43:01 crc kubenswrapper[4888]: I1201 20:43:01.831951 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_6b0b6a93-78d0-43c8-b6fb-059da98cf4bd/cinder-api/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.005364 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4d69bebc-c646-4da3-acc5-c7a3106c8100/cinder-scheduler/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.096257 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_4d69bebc-c646-4da3-acc5-c7a3106c8100/probe/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.165338 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-5k5sd_fc27098c-7ab4-4b1d-b5e2-2784d655cd9c/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.294315 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mdgwk_c80b7e5e-b12e-49c1-8379-a7e33ad355fb/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.390474 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/init/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.525734 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/init/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.631091 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lknxb_f547ee6c-51cc-47cb-b6c8-2df4311039b2/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.662886 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-jjqkn_0366eee1-e2f6-4c97-a1e2-ed1e374e2021/dnsmasq-dns/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.833889 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b9f3c82f-62d2-4a71-9832-223f1a735016/glance-httpd/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.857086 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_b9f3c82f-62d2-4a71-9832-223f1a735016/glance-log/0.log" Dec 01 20:43:02 crc kubenswrapper[4888]: I1201 20:43:02.984742 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2f583b75-592c-438c-ae74-80dbd15c4eb1/glance-httpd/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.041300 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_2f583b75-592c-438c-ae74-80dbd15c4eb1/glance-log/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.227803 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d8bccccd8-fw8bk_5006252a-8f29-475c-9847-e2d6662ff13b/horizon/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.420563 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-j5tqz_2cc88e76-38aa-4d88-97e3-2d9829760fdf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.661316 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-d8bccccd8-fw8bk_5006252a-8f29-475c-9847-e2d6662ff13b/horizon-log/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.728626 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-pfrz2_61d5991b-f680-443e-8562-d4e755429abe/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.807237 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:03 crc kubenswrapper[4888]: E1201 20:43:03.807743 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" containerName="container-00" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.807764 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" containerName="container-00" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.808015 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e914ef5-bc36-4e36-bd4f-adca03d8f2d7" containerName="container-00" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.809664 4888 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.820035 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.845361 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6c8cb9cfb7-n54hp_6c61e8e0-f725-45ed-8a82-740b3243120d/keystone-api/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.891677 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.891732 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.891793 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lngxx\" (UniqueName: \"kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.989714 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29410321-t5grt_b059b2fe-58fd-46d6-8da6-ce215b31283a/keystone-cron/0.log" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.994075 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.994145 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.994232 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lngxx\" (UniqueName: \"kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.994676 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:03 crc kubenswrapper[4888]: I1201 20:43:03.994715 4888 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:04 crc kubenswrapper[4888]: I1201 20:43:04.018625 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lngxx\" (UniqueName: \"kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx\") pod \"redhat-operators-4wq24\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:04 crc kubenswrapper[4888]: I1201 20:43:04.092895 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_26b544b6-2ef6-40f8-8cf6-0834d6d7bc39/kube-state-metrics/0.log" Dec 01 20:43:04 crc kubenswrapper[4888]: I1201 20:43:04.139780 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:04 crc kubenswrapper[4888]: I1201 20:43:04.372201 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-p5qq8_a5293702-c5a9-442d-b776-bed869af0d5d/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:04 crc kubenswrapper[4888]: I1201 20:43:04.641586 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.055673 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-58df6cb45-qjhmp_a4b29995-f291-4e12-bfb1-fad0318b0416/neutron-api/0.log" Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.148684 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-58df6cb45-qjhmp_a4b29995-f291-4e12-bfb1-fad0318b0416/neutron-httpd/0.log" Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.311050 4888 generic.go:334] "Generic (PLEG): container finished" podID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerID="14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87" exitCode=0 Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.311093 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerDied","Data":"14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87"} Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.311132 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerStarted","Data":"12803675a5c6639f3501ea5b6ea467b94dbab46a8ea78b6eadc0d6439eeff5e0"} Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.475694 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-x9q8m_b1abae65-0fe4-4a5c-afa8-824894f56643/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:05 crc kubenswrapper[4888]: I1201 20:43:05.998338 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed/nova-api-log/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.136489 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell0-conductor-0_b819abdf-a2be-4ee7-a019-15bfbc16578a/nova-cell0-conductor-conductor/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.400006 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8c3ae8a7-95af-4a5c-b4a7-70f0950b83ed/nova-api-api/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.429480 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_045ca115-d337-48ae-bfce-0df835c95bc8/nova-cell1-conductor-conductor/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.544968 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_086e1f96-58d1-42ab-a745-839383b65b7e/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.755500 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-m7dqd_51a678f1-7309-4200-bf0f-8329f67d2a5c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:06 crc kubenswrapper[4888]: I1201 20:43:06.907726 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_649d802d-a02d-403f-938c-8875b22f1e04/nova-metadata-log/0.log" Dec 01 20:43:07 crc kubenswrapper[4888]: I1201 20:43:07.232859 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e595d238-ccc8-452b-9e47-3439757e586f/nova-scheduler-scheduler/0.log" Dec 01 20:43:07 crc kubenswrapper[4888]: I1201 20:43:07.235730 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/mysql-bootstrap/0.log" Dec 01 20:43:07 crc kubenswrapper[4888]: I1201 20:43:07.472353 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/mysql-bootstrap/0.log" Dec 01 20:43:07 crc kubenswrapper[4888]: I1201 20:43:07.503420 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_879f6704-c02e-420a-90ec-23bfb1ce35fe/galera/0.log" Dec 01 20:43:07 crc kubenswrapper[4888]: I1201 20:43:07.689582 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/mysql-bootstrap/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.014408 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/galera/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.023775 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bca8bbd3-bfa6-4767-8196-e085c6160a7f/mysql-bootstrap/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.216849 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a72a3441-507d-44c7-b575-3c3a12fa6821/openstackclient/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.315012 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-b4v8q_644ca96f-aee4-40b9-957b-b18e28634a66/ovn-controller/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.341501 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" 
event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerStarted","Data":"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2"} Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.402716 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.405566 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.405690 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.490602 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5rrc\" (UniqueName: \"kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.490689 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.490736 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.584137 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-v92p7_9c965736-0751-48aa-bf50-db27978e0e91/openstack-network-exporter/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.593406 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5rrc\" (UniqueName: \"kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.593946 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.594106 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.594641 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.595475 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.617129 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5rrc\" (UniqueName: \"kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc\") pod \"certified-operators-lgcs8\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.703243 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_649d802d-a02d-403f-938c-8875b22f1e04/nova-metadata-metadata/0.log" Dec 01 20:43:08 crc kubenswrapper[4888]: I1201 20:43:08.766729 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.315124 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server-init/0.log" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.353247 4888 generic.go:334] "Generic (PLEG): container finished" podID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerID="85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2" exitCode=0 Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.353296 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerDied","Data":"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2"} Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.426168 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.556880 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server/0.log" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.579478 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovsdb-server-init/0.log" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.608568 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-9rcwl_7030c3c7-8abe-4d3f-9279-a90d581f551b/ovs-vswitchd/0.log" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.824573 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-rtj2r_2e35aaa2-7b44-48c2-b94d-46f753c5698f/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:09 crc kubenswrapper[4888]: I1201 20:43:09.910174 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b3cf90fd-3f01-4cf5-bb00-9d5c2e374448/openstack-network-exporter/0.log" Dec 01 20:43:09 
crc kubenswrapper[4888]: I1201 20:43:09.911805 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_b3cf90fd-3f01-4cf5-bb00-9d5c2e374448/ovn-northd/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.130300 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b1e54c26-f189-448c-be1f-57d58fcd50bf/openstack-network-exporter/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.141603 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b1e54c26-f189-448c-be1f-57d58fcd50bf/ovsdbserver-nb/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.365877 4888 generic.go:334] "Generic (PLEG): container finished" podID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerID="1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1" exitCode=0 Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.365971 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerDied","Data":"1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1"} Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.366001 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerStarted","Data":"f30c78b428a770e25cb8150bd7d580af9e3920bb5d93890ca391c4eab8d8f139"} Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.368983 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerStarted","Data":"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db"} Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.414289 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4wq24" podStartSLOduration=2.724002564 podStartE2EDuration="7.414267175s" podCreationTimestamp="2025-12-01 20:43:03 +0000 UTC" firstStartedPulling="2025-12-01 20:43:05.317321497 +0000 UTC m=+4185.188351411" lastFinishedPulling="2025-12-01 20:43:10.007586108 +0000 UTC m=+4189.878616022" observedRunningTime="2025-12-01 20:43:10.411845067 +0000 UTC m=+4190.282874991" watchObservedRunningTime="2025-12-01 20:43:10.414267175 +0000 UTC m=+4190.285297089" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.430683 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b3edb1da-ac59-4264-833a-499b13fb5071/ovsdbserver-sb/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.431685 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b3edb1da-ac59-4264-833a-499b13fb5071/openstack-network-exporter/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.582906 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6498684f7d-hltrn_599b1532-a5ae-4a98-bcc2-cc6a9d93cae3/placement-api/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.728167 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/setup-container/0.log" Dec 01 20:43:10 crc kubenswrapper[4888]: I1201 20:43:10.808265 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-6498684f7d-hltrn_599b1532-a5ae-4a98-bcc2-cc6a9d93cae3/placement-log/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.064654 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/rabbitmq/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.075749 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/setup-container/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.145721 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c47dabe8-c903-4454-82c5-3c4a28322366/setup-container/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.470881 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/rabbitmq/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.475638 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_ff7c4d43-2663-4f78-a40b-8a6dc418c31c/setup-container/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.516970 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-zwzjm_f6a86735-753a-4ef6-9e99-5394105fcff0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.771680 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-9qqjn_44d8536a-ae3d-4b72-bdfb-cb3fd6422d4c/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:11 crc kubenswrapper[4888]: I1201 20:43:11.915739 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-nv6jx_7e6c49f3-b69a-4381-b5d6-4a66e283d49f/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.267044 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-nbgcl_0bbd9b56-59fc-49ea-9cef-4e1e5e7af93a/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.364421 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-k46s7_49b24356-5b0c-43f4-a3d8-0a74c3aa57d9/ssh-known-hosts-edpm-deployment/0.log" Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.424722 4888 generic.go:334] "Generic (PLEG): container finished" podID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerID="f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b" exitCode=0 Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.424810 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerDied","Data":"f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b"} Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.943006 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-s6dcl_2617e3a7-0ff0-4843-9126-a32cee9da7ca/swift-ring-rebalance/0.log" Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.960567 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-5b97969cc9-55p9t_ddf684f3-00b0-4564-99ba-e29243df64fb/proxy-server/0.log" Dec 01 20:43:12 crc kubenswrapper[4888]: I1201 20:43:12.998029 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5b97969cc9-55p9t_ddf684f3-00b0-4564-99ba-e29243df64fb/proxy-httpd/0.log" Dec 01 20:43:13 crc kubenswrapper[4888]: I1201 20:43:13.222813 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-auditor/0.log" Dec 01 20:43:13 crc kubenswrapper[4888]: I1201 20:43:13.251095 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-reaper/0.log" Dec 01 20:43:13 crc kubenswrapper[4888]: I1201 20:43:13.350831 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-replicator/0.log" Dec 01 20:43:13 crc kubenswrapper[4888]: I1201 20:43:13.463679 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/account-server/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.038254 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-auditor/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.099456 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-updater/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.131895 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-server/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.133919 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/container-replicator/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.139910 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.141356 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.345607 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-expirer/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.411930 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-auditor/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.412904 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-replicator/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.463037 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerStarted","Data":"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e"} Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.477407 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-lgcs8" podStartSLOduration=3.267509336 podStartE2EDuration="6.477389858s" podCreationTimestamp="2025-12-01 20:43:08 +0000 UTC" firstStartedPulling="2025-12-01 20:43:10.368505081 +0000 UTC m=+4190.239535005" lastFinishedPulling="2025-12-01 20:43:13.578385623 +0000 UTC m=+4193.449415527" observedRunningTime="2025-12-01 20:43:14.476536334 +0000 UTC m=+4194.347566248" watchObservedRunningTime="2025-12-01 20:43:14.477389858 +0000 UTC m=+4194.348419772" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.502244 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-server/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.650873 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/rsync/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.746455 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/object-updater/0.log" Dec 01 20:43:14 crc kubenswrapper[4888]: I1201 20:43:14.835943 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_33b47f5a-af5b-41b4-9178-a956cd6d2101/swift-recon-cron/0.log" Dec 01 20:43:15 crc kubenswrapper[4888]: I1201 20:43:15.127446 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_512845e9-2357-4129-bfb0-4e636ea554e9/tempest-tests-tempest-tests-runner/0.log" Dec 01 20:43:15 crc kubenswrapper[4888]: I1201 20:43:15.195431 4888 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4wq24" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="registry-server" probeResult="failure" output=< Dec 01 20:43:15 crc kubenswrapper[4888]: timeout: failed to connect service ":50051" within 1s Dec 01 20:43:15 crc kubenswrapper[4888]: > Dec 01 20:43:15 crc kubenswrapper[4888]: I1201 20:43:15.235682 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2mdhd_41059254-cd26-40bb-bd15-bd935fd4e7e1/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:15 crc kubenswrapper[4888]: I1201 20:43:15.829037 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_54cbeba4-57d6-4934-8bc6-61cea77023c8/test-operator-logs-container/0.log" Dec 01 20:43:16 crc kubenswrapper[4888]: I1201 20:43:16.056904 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-m5lm4_3e3c0d8b-c2f8-4c4a-969a-2396daa6a50e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 20:43:18 crc kubenswrapper[4888]: I1201 20:43:18.767344 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:18 crc kubenswrapper[4888]: I1201 20:43:18.767769 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:18 crc kubenswrapper[4888]: I1201 20:43:18.816551 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:19 crc kubenswrapper[4888]: I1201 20:43:19.544922 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:19 crc kubenswrapper[4888]: I1201 20:43:19.600281 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:20 crc kubenswrapper[4888]: I1201 20:43:20.037162 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:43:20 crc kubenswrapper[4888]: I1201 20:43:20.037784 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:43:21 crc kubenswrapper[4888]: I1201 20:43:21.524015 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lgcs8" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="registry-server" containerID="cri-o://6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e" gracePeriod=2 Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.052842 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.231434 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content\") pod \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.231523 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities\") pod \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.231568 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5rrc\" (UniqueName: \"kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc\") pod \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\" (UID: \"ee8363ec-0467-4b7f-9b27-b17e8cf7af37\") " Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.235905 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities" (OuterVolumeSpecName: "utilities") pod "ee8363ec-0467-4b7f-9b27-b17e8cf7af37" (UID: "ee8363ec-0467-4b7f-9b27-b17e8cf7af37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.256910 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc" (OuterVolumeSpecName: "kube-api-access-n5rrc") pod "ee8363ec-0467-4b7f-9b27-b17e8cf7af37" (UID: "ee8363ec-0467-4b7f-9b27-b17e8cf7af37"). InnerVolumeSpecName "kube-api-access-n5rrc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.334637 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.334671 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5rrc\" (UniqueName: \"kubernetes.io/projected/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-kube-api-access-n5rrc\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.353608 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee8363ec-0467-4b7f-9b27-b17e8cf7af37" (UID: "ee8363ec-0467-4b7f-9b27-b17e8cf7af37"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.437056 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee8363ec-0467-4b7f-9b27-b17e8cf7af37-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.571435 4888 generic.go:334] "Generic (PLEG): container finished" podID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerID="6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e" exitCode=0 Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.571502 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerDied","Data":"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e"} Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.571533 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgcs8" event={"ID":"ee8363ec-0467-4b7f-9b27-b17e8cf7af37","Type":"ContainerDied","Data":"f30c78b428a770e25cb8150bd7d580af9e3920bb5d93890ca391c4eab8d8f139"} Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.571553 4888 scope.go:117] "RemoveContainer" containerID="6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.571558 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lgcs8" Dec 01 20:43:22 crc kubenswrapper[4888]: E1201 20:43:22.597337 4888 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee8363ec_0467_4b7f_9b27_b17e8cf7af37.slice\": RecentStats: unable to find data in memory cache]" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.626153 4888 scope.go:117] "RemoveContainer" containerID="f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.627047 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.641305 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lgcs8"] Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.674919 4888 scope.go:117] "RemoveContainer" containerID="1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.711778 4888 scope.go:117] "RemoveContainer" containerID="6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e" Dec 01 20:43:22 crc kubenswrapper[4888]: E1201 20:43:22.716802 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e\": container with ID starting with 6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e not found: ID does not exist" containerID="6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.716873 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e"} err="failed to get container status \"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e\": rpc error: code = NotFound desc = could not find container \"6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e\": container with ID starting with 6cb27ed4b6be538497424376837b2b2f19fa278f00d45cd81a4cd810c3e9d48e not found: ID does not exist" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.716915 4888 scope.go:117] "RemoveContainer" containerID="f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b" Dec 01 20:43:22 crc kubenswrapper[4888]: E1201 20:43:22.717279 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b\": container with ID starting with f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b not found: ID does not exist" containerID="f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.717317 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b"} err="failed to get container status \"f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b\": rpc error: code = NotFound desc = could not find container \"f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b\": container with ID starting with 
f65254589c21046849bbef850e395710ccf04284d332b760430ef7c17edcc77b not found: ID does not exist" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.717348 4888 scope.go:117] "RemoveContainer" containerID="1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1" Dec 01 20:43:22 crc kubenswrapper[4888]: E1201 20:43:22.717594 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1\": container with ID starting with 1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1 not found: ID does not exist" containerID="1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1" Dec 01 20:43:22 crc kubenswrapper[4888]: I1201 20:43:22.717623 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1"} err="failed to get container status \"1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1\": rpc error: code = NotFound desc = could not find container \"1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1\": container with ID starting with 1a033791959362dadb3dfad01ff64ec6766742b73e8b26eb190a5a415120c8d1 not found: ID does not exist" Dec 01 20:43:23 crc kubenswrapper[4888]: I1201 20:43:23.018628 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_0e0fe4f5-1c76-4c72-a2cd-aa07f2e4c4ac/memcached/0.log" Dec 01 20:43:24 crc kubenswrapper[4888]: I1201 20:43:24.461059 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" path="/var/lib/kubelet/pods/ee8363ec-0467-4b7f-9b27-b17e8cf7af37/volumes" Dec 01 20:43:24 crc kubenswrapper[4888]: I1201 20:43:24.743745 4888 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:24 crc kubenswrapper[4888]: I1201 20:43:24.804668 4888 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:26 crc kubenswrapper[4888]: I1201 20:43:26.070095 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:26 crc kubenswrapper[4888]: I1201 20:43:26.605610 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4wq24" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="registry-server" containerID="cri-o://e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db" gracePeriod=2 Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.069047 4888 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.146866 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities\") pod \"7247e10f-6986-4742-8f2f-a58d726b09d4\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.147053 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content\") pod \"7247e10f-6986-4742-8f2f-a58d726b09d4\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.147117 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lngxx\" (UniqueName: \"kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx\") pod \"7247e10f-6986-4742-8f2f-a58d726b09d4\" (UID: \"7247e10f-6986-4742-8f2f-a58d726b09d4\") " Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.148468 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities" (OuterVolumeSpecName: "utilities") pod "7247e10f-6986-4742-8f2f-a58d726b09d4" (UID: "7247e10f-6986-4742-8f2f-a58d726b09d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.166581 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx" (OuterVolumeSpecName: "kube-api-access-lngxx") pod "7247e10f-6986-4742-8f2f-a58d726b09d4" (UID: "7247e10f-6986-4742-8f2f-a58d726b09d4"). InnerVolumeSpecName "kube-api-access-lngxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.250078 4888 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.250125 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lngxx\" (UniqueName: \"kubernetes.io/projected/7247e10f-6986-4742-8f2f-a58d726b09d4-kube-api-access-lngxx\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.258148 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7247e10f-6986-4742-8f2f-a58d726b09d4" (UID: "7247e10f-6986-4742-8f2f-a58d726b09d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.352232 4888 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7247e10f-6986-4742-8f2f-a58d726b09d4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.616558 4888 generic.go:334] "Generic (PLEG): container finished" podID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerID="e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db" exitCode=0 Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.616612 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerDied","Data":"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db"} Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.616645 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq24" event={"ID":"7247e10f-6986-4742-8f2f-a58d726b09d4","Type":"ContainerDied","Data":"12803675a5c6639f3501ea5b6ea467b94dbab46a8ea78b6eadc0d6439eeff5e0"} Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.616643 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq24" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.616725 4888 scope.go:117] "RemoveContainer" containerID="e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.645162 4888 scope.go:117] "RemoveContainer" containerID="85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.670231 4888 scope.go:117] "RemoveContainer" containerID="14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.676299 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.686589 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4wq24"] Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.736989 4888 scope.go:117] "RemoveContainer" containerID="e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db" Dec 01 20:43:27 crc kubenswrapper[4888]: E1201 20:43:27.738809 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db\": container with ID starting with e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db not found: ID does not exist" containerID="e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.738851 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db"} err="failed to get container status \"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db\": rpc error: code = NotFound desc = could not find container \"e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db\": container with ID starting with e8302070962b20d54a1a8c77d805a6a5a65068364b41b3f916b0df303d3a27db not found: ID does not exist" Dec 01 20:43:27 crc 
kubenswrapper[4888]: I1201 20:43:27.738879 4888 scope.go:117] "RemoveContainer" containerID="85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2" Dec 01 20:43:27 crc kubenswrapper[4888]: E1201 20:43:27.740399 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2\": container with ID starting with 85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2 not found: ID does not exist" containerID="85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.740435 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2"} err="failed to get container status \"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2\": rpc error: code = NotFound desc = could not find container \"85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2\": container with ID starting with 85f0958af99359c1053340c62d1a26a2744da658799c36d9a13740a5a19521b2 not found: ID does not exist" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.740454 4888 scope.go:117] "RemoveContainer" containerID="14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87" Dec 01 20:43:27 crc kubenswrapper[4888]: E1201 20:43:27.742238 4888 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87\": container with ID starting with 14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87 not found: ID does not exist" containerID="14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87" Dec 01 20:43:27 crc kubenswrapper[4888]: I1201 20:43:27.742266 4888 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87"} err="failed to get container status \"14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87\": rpc error: code = NotFound desc = could not find container \"14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87\": container with ID starting with 14e278c7dfab6bd8e7db65da7240728654fb6d934acb39871a8b9d7becd66f87 not found: ID does not exist" Dec 01 20:43:28 crc kubenswrapper[4888]: I1201 20:43:28.462792 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" path="/var/lib/kubelet/pods/7247e10f-6986-4742-8f2f-a58d726b09d4/volumes" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.422130 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-kjwft_1529e922-fd00-4f32-878a-d8a322a7b6b7/kube-rbac-proxy/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.535937 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-kjwft_1529e922-fd00-4f32-878a-d8a322a7b6b7/manager/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.651629 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-f58dj_a363b1d3-f519-41df-bdf8-e80b83edab4d/kube-rbac-proxy/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.671222 4888 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-f58dj_a363b1d3-f519-41df-bdf8-e80b83edab4d/manager/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.804320 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lbp4z_a0f5d22c-34c3-40c2-889c-b7900120919c/kube-rbac-proxy/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.835507 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-lbp4z_a0f5d22c-34c3-40c2-889c-b7900120919c/manager/0.log" Dec 01 20:43:43 crc kubenswrapper[4888]: I1201 20:43:43.914660 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.114653 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.130236 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.168526 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.290924 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/pull/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.300677 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/util/0.log" Dec 01 20:43:44 crc kubenswrapper[4888]: I1201 20:43:44.308123 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e4eb9d67f45569077a7459cbf78e395265fc25b8dac5136f151f67defd84pd9_48623756-fbaa-4a4f-867a-648ffc6becc9/extract/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.058825 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-jszb6_9f5355dc-205f-4dca-91cf-39209ca1a7b3/kube-rbac-proxy/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.132947 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-jszb6_9f5355dc-205f-4dca-91cf-39209ca1a7b3/manager/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.174781 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-8wh4g_3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2/kube-rbac-proxy/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.284537 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-8wh4g_3d94aa0c-c3c1-4354-8a6e-f6b20a56cdf2/manager/0.log" Dec 01 20:43:45 crc 
kubenswrapper[4888]: I1201 20:43:45.334917 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gqwgf_1b3586db-3f49-4ee4-aed0-5e4d469fad92/kube-rbac-proxy/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.426567 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gqwgf_1b3586db-3f49-4ee4-aed0-5e4d469fad92/manager/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.482211 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bjgvv_6cb92420-4e6c-4407-9a54-93f003d1c5e9/kube-rbac-proxy/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.722881 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bjgvv_6cb92420-4e6c-4407-9a54-93f003d1c5e9/manager/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.751371 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5mfth_da594cb2-bb6a-4028-a609-68385c474377/manager/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.753259 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-5mfth_da594cb2-bb6a-4028-a609-68385c474377/kube-rbac-proxy/0.log" Dec 01 20:43:45 crc kubenswrapper[4888]: I1201 20:43:45.924549 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-446zc_b7f34996-fe75-4c30-9e22-022f644f7c89/kube-rbac-proxy/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.015173 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-446zc_b7f34996-fe75-4c30-9e22-022f644f7c89/manager/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.111355 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-sszrn_516e9598-68a9-431a-84af-725e3a053e66/kube-rbac-proxy/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.137163 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-sszrn_516e9598-68a9-431a-84af-725e3a053e66/manager/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.177159 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-pxs95_968d110b-5720-400e-9094-8ec39acb4cf6/kube-rbac-proxy/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.660005 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tcnh9_8ad47b0a-b049-45fa-afea-44eb4d5be85f/kube-rbac-proxy/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.707818 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-pxs95_968d110b-5720-400e-9094-8ec39acb4cf6/manager/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.709756 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-tcnh9_8ad47b0a-b049-45fa-afea-44eb4d5be85f/manager/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.900366 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-z2xkh_4651cf7c-a7f9-4137-9d3b-6a656746f373/kube-rbac-proxy/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.937109 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-z2xkh_4651cf7c-a7f9-4137-9d3b-6a656746f373/manager/0.log" Dec 01 20:43:46 crc kubenswrapper[4888]: I1201 20:43:46.939205 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-psjhd_922916d6-2e57-4087-b5ae-24c6318f180a/kube-rbac-proxy/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.120916 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-psjhd_922916d6-2e57-4087-b5ae-24c6318f180a/manager/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.139028 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp_06a8d696-66ab-49ef-b858-2245cc6e0023/kube-rbac-proxy/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.162347 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4vhhgp_06a8d696-66ab-49ef-b858-2245cc6e0023/manager/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.384018 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-mwxl6_805e9225-e4df-4c8c-b543-29bec3f42292/registry-server/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.472855 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-d5b7696c6-7s4mq_bf82583b-b2be-41bb-af62-24e74142855f/operator/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.540167 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-v2mfg_ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02/kube-rbac-proxy/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.641540 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-v2mfg_ccbeebb6-5cbe-4085-8d23-e18c0bcb7c02/manager/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.785028 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-d4tzz_7914a20a-7747-446f-a496-deecd734fb83/manager/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.807612 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-d4tzz_7914a20a-7747-446f-a496-deecd734fb83/kube-rbac-proxy/0.log" Dec 01 20:43:47 crc kubenswrapper[4888]: I1201 20:43:47.949887 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-ql8v9_e11be1d4-dbcb-4e6b-a97a-918425cb85ce/operator/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.072608 4888 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-f6vhb_1f11c3a5-7276-48d5-9dc1-389ab98ffc11/kube-rbac-proxy/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.140542 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-f6vhb_1f11c3a5-7276-48d5-9dc1-389ab98ffc11/manager/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.233749 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-g79qf_e75f9e52-ceaa-463a-ba65-ed651715c4f4/kube-rbac-proxy/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.356013 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-g79qf_e75f9e52-ceaa-463a-ba65-ed651715c4f4/manager/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.404268 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6c58f9c549-nbj7h_b01f5340-ffdb-4963-9e49-47dad6f75642/manager/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.420353 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-cht8z_b50af81b-6773-46f1-916e-0346848ba65e/kube-rbac-proxy/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.435596 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-cht8z_b50af81b-6773-46f1-916e-0346848ba65e/manager/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.552139 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-ntzlv_f94185be-1233-4c97-add2-b6e2fcd22827/kube-rbac-proxy/0.log" Dec 01 20:43:48 crc kubenswrapper[4888]: I1201 20:43:48.603628 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-ntzlv_f94185be-1233-4c97-add2-b6e2fcd22827/manager/0.log" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.037287 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.037587 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.037629 4888 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.038344 4888 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"} pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.038401 4888 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" containerID="cri-o://ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" gracePeriod=600 Dec 01 20:43:50 crc kubenswrapper[4888]: E1201 20:43:50.178076 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.857426 4888 generic.go:334] "Generic (PLEG): container finished" podID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" exitCode=0 Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.857477 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerDied","Data":"ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"} Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.857525 4888 scope.go:117] "RemoveContainer" containerID="ca06a1bc12ec94ca62f6ad0f98fac2bb57050181d236746cd937884b545f9791" Dec 01 20:43:50 crc kubenswrapper[4888]: I1201 20:43:50.858904 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:43:50 crc kubenswrapper[4888]: E1201 20:43:50.861566 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:01 crc kubenswrapper[4888]: I1201 20:44:01.451758 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:01 crc kubenswrapper[4888]: E1201 20:44:01.452531 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:07 crc kubenswrapper[4888]: I1201 20:44:07.433236 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-42k5p_50b1e183-9a9a-4daa-a769-78bc53d20c41/control-plane-machine-set-operator/0.log" Dec 01 20:44:07 crc kubenswrapper[4888]: I1201 20:44:07.584344 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2w272_60a42fba-5e64-4a68-a9a3-e29ff836d97f/kube-rbac-proxy/0.log" Dec 01 20:44:07 crc kubenswrapper[4888]: I1201 20:44:07.631529 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2w272_60a42fba-5e64-4a68-a9a3-e29ff836d97f/machine-api-operator/0.log" Dec 01 20:44:13 crc kubenswrapper[4888]: I1201 20:44:13.451084 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:13 crc kubenswrapper[4888]: E1201 20:44:13.451962 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:19 crc kubenswrapper[4888]: I1201 20:44:19.437328 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-td6nx_68662703-dd1d-4a5f-8884-d79b491c4fe2/cert-manager-controller/0.log" Dec 01 20:44:19 crc kubenswrapper[4888]: I1201 20:44:19.628891 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-llsnr_5695a357-bd5b-42c3-952b-f2be7e800dce/cert-manager-cainjector/0.log" Dec 01 20:44:19 crc kubenswrapper[4888]: I1201 20:44:19.652222 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-6mj44_fe6ae8a5-6bae-469d-a22f-6cbeb2cc3858/cert-manager-webhook/0.log" Dec 01 20:44:24 crc kubenswrapper[4888]: I1201 20:44:24.451266 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:24 crc kubenswrapper[4888]: E1201 20:44:24.451969 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.074714 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-jdzp6_e8b41a7b-e30b-40a3-9d94-89af1c9623b6/nmstate-console-plugin/0.log" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.253844 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-frhtc_a9567735-6e3f-46d7-aa56-837398be488b/nmstate-handler/0.log" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.285161 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qddf7_bc82e383-0b0c-4f71-84b0-8c1de3ba240a/kube-rbac-proxy/0.log" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.310524 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-qddf7_bc82e383-0b0c-4f71-84b0-8c1de3ba240a/nmstate-metrics/0.log" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.439285 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-xjmvl_d4346417-1916-4764-949c-3f2a628501e1/nmstate-operator/0.log" Dec 01 20:44:32 crc kubenswrapper[4888]: I1201 20:44:32.481316 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-k2bjh_398ab1db-126e-4ea0-b429-a7563f68c127/nmstate-webhook/0.log" Dec 01 20:44:36 crc kubenswrapper[4888]: I1201 20:44:36.451716 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:36 crc kubenswrapper[4888]: E1201 20:44:36.452423 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.393433 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5s49p_5234563a-ff0f-42ed-b8da-24b76dc29ebc/kube-rbac-proxy/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.450784 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:47 crc kubenswrapper[4888]: E1201 20:44:47.451055 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.462362 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5s49p_5234563a-ff0f-42ed-b8da-24b76dc29ebc/controller/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.566314 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-wlhmh_76b0bcdf-1744-4b10-8576-7bf114e2ec63/frr-k8s-webhook-server/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.643523 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.791970 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.792118 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.819245 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.847377 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:44:47 crc kubenswrapper[4888]: I1201 20:44:47.990035 4888 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.013995 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.014097 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.027892 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.207419 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-frr-files/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.251686 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-reloader/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.292344 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/cp-metrics/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.295456 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/controller/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.455592 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/frr-metrics/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.463229 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/kube-rbac-proxy/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.510229 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/kube-rbac-proxy-frr/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.686126 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/reloader/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.764300 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-86c8597757-vv42m_a9e96183-2604-4b4c-bc23-a48485783f33/manager/0.log" Dec 01 20:44:48 crc kubenswrapper[4888]: I1201 20:44:48.956125 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-75d75bc95b-g8xkf_9a88f138-a2b0-4826-8bba-dd3b7942d88b/webhook-server/0.log" Dec 01 20:44:49 crc kubenswrapper[4888]: I1201 20:44:49.206801 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9zffn_84659dfe-1cdd-43b7-bb53-8adbf22e4c20/kube-rbac-proxy/0.log" Dec 01 20:44:49 crc kubenswrapper[4888]: I1201 20:44:49.679547 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-9zffn_84659dfe-1cdd-43b7-bb53-8adbf22e4c20/speaker/0.log" Dec 01 20:44:49 crc kubenswrapper[4888]: I1201 20:44:49.838624 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-wjv4g_82396ea9-c5dd-4464-87f8-972b933e048e/frr/0.log" Dec 01 20:44:58 crc kubenswrapper[4888]: I1201 20:44:58.452318 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:44:58 crc kubenswrapper[4888]: E1201 20:44:58.453357 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.180908 4888 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"] Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181477 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="registry-server" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181504 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="registry-server" Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181521 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="registry-server" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181529 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="registry-server" Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181556 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="extract-utilities" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181565 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="extract-utilities" Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181572 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="extract-content" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181582 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="extract-content" Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181597 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="extract-utilities" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181604 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="extract-utilities" Dec 01 20:45:00 crc kubenswrapper[4888]: E1201 20:45:00.181641 4888 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="extract-content" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181649 4888 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="extract-content" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181878 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee8363ec-0467-4b7f-9b27-b17e8cf7af37" containerName="registry-server" 
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.181914 4888 memory_manager.go:354] "RemoveStaleState removing state" podUID="7247e10f-6986-4742-8f2f-a58d726b09d4" containerName="registry-server"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.182722 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.185684 4888 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.185713 4888 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.192227 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"]
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.272663 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhk68\" (UniqueName: \"kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.273060 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.273269 4888 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.375449 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhk68\" (UniqueName: \"kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.375590 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.375638 4888 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"
pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.376752 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.382256 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.391924 4888 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhk68\" (UniqueName: \"kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68\") pod \"collect-profiles-29410365-626q7\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.549589 4888 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:00 crc kubenswrapper[4888]: I1201 20:45:00.986227 4888 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7"] Dec 01 20:45:00 crc kubenswrapper[4888]: W1201 20:45:00.987419 4888 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06e3803d_e495_491f_8dba_82f831a1627a.slice/crio-e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891 WatchSource:0}: Error finding container e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891: Status 404 returned error can't find the container with id e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891 Dec 01 20:45:01 crc kubenswrapper[4888]: I1201 20:45:01.494953 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" event={"ID":"06e3803d-e495-491f-8dba-82f831a1627a","Type":"ContainerStarted","Data":"a9f25213c10b4ebbffe6a00c9bfd6831d1a0280a22ee8fe823eeca32ad8659d5"} Dec 01 20:45:01 crc kubenswrapper[4888]: I1201 20:45:01.494996 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" event={"ID":"06e3803d-e495-491f-8dba-82f831a1627a","Type":"ContainerStarted","Data":"e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891"} Dec 01 20:45:01 crc kubenswrapper[4888]: I1201 20:45:01.512864 4888 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" podStartSLOduration=1.512821383 podStartE2EDuration="1.512821383s" podCreationTimestamp="2025-12-01 20:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 20:45:01.507135453 +0000 UTC m=+4301.378165367" watchObservedRunningTime="2025-12-01 20:45:01.512821383 +0000 UTC 
m=+4301.383851297" Dec 01 20:45:02 crc kubenswrapper[4888]: I1201 20:45:02.505809 4888 generic.go:334] "Generic (PLEG): container finished" podID="06e3803d-e495-491f-8dba-82f831a1627a" containerID="a9f25213c10b4ebbffe6a00c9bfd6831d1a0280a22ee8fe823eeca32ad8659d5" exitCode=0 Dec 01 20:45:02 crc kubenswrapper[4888]: I1201 20:45:02.505867 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" event={"ID":"06e3803d-e495-491f-8dba-82f831a1627a","Type":"ContainerDied","Data":"a9f25213c10b4ebbffe6a00c9bfd6831d1a0280a22ee8fe823eeca32ad8659d5"} Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.916377 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.940581 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume\") pod \"06e3803d-e495-491f-8dba-82f831a1627a\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.940710 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhk68\" (UniqueName: \"kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68\") pod \"06e3803d-e495-491f-8dba-82f831a1627a\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.940871 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume\") pod \"06e3803d-e495-491f-8dba-82f831a1627a\" (UID: \"06e3803d-e495-491f-8dba-82f831a1627a\") " Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.942807 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume" (OuterVolumeSpecName: "config-volume") pod "06e3803d-e495-491f-8dba-82f831a1627a" (UID: "06e3803d-e495-491f-8dba-82f831a1627a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.953429 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "06e3803d-e495-491f-8dba-82f831a1627a" (UID: "06e3803d-e495-491f-8dba-82f831a1627a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 20:45:03 crc kubenswrapper[4888]: I1201 20:45:03.955971 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68" (OuterVolumeSpecName: "kube-api-access-dhk68") pod "06e3803d-e495-491f-8dba-82f831a1627a" (UID: "06e3803d-e495-491f-8dba-82f831a1627a"). InnerVolumeSpecName "kube-api-access-dhk68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.042328 4888 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06e3803d-e495-491f-8dba-82f831a1627a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.042367 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhk68\" (UniqueName: \"kubernetes.io/projected/06e3803d-e495-491f-8dba-82f831a1627a-kube-api-access-dhk68\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.042381 4888 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06e3803d-e495-491f-8dba-82f831a1627a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.529960 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" event={"ID":"06e3803d-e495-491f-8dba-82f831a1627a","Type":"ContainerDied","Data":"e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891"} Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.529997 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e008befe27ae2293faf3deda25caff1fb5330dab7b5180db1d0032385a667891" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.530005 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29410365-626q7" Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.582232 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk"] Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.590135 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29410320-qcfpk"] Dec 01 20:45:04 crc kubenswrapper[4888]: I1201 20:45:04.960754 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.157267 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.190913 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.229176 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.367330 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.370861 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/pull/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.387333 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fwd2k2_3e000b33-9ace-4e1d-b43b-a884375df712/extract/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.551231 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.710851 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.726094 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.734223 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.884642 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/util/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.911780 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/extract/0.log" Dec 01 20:45:05 crc kubenswrapper[4888]: I1201 20:45:05.950212 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f836tg69_8920f51c-9abd-44ee-8418-0f8faa197a1e/pull/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.047852 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.207395 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.253695 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.258678 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.392405 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-utilities/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.408649 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/extract-content/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.460296 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="690f6f48-b606-4db9-b569-3bfbec27a013" path="/var/lib/kubelet/pods/690f6f48-b606-4db9-b569-3bfbec27a013/volumes" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.662616 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.769023 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.791415 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.890344 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:45:06 crc kubenswrapper[4888]: I1201 20:45:06.966864 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-vvwwp_3249d3f3-6a27-4acd-8d8b-4c5360dc7f43/registry-server/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.122546 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-content/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.168884 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/extract-utilities/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.280884 4888 scope.go:117] "RemoveContainer" containerID="ae17c5455f1b9402ed9930ae1cf7b00fbeeaea29a27755011351dd4af56719ff" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.359955 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/2.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.437996 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-w5r85_08dcfb84-e006-4100-8a3a-26dc77a68e61/marketplace-operator/1.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.602339 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.737840 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5ntwc_8c488b93-cfe9-4b9e-9299-1e2b011e84bc/registry-server/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.837437 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.844868 4888 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.845329 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.988890 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-utilities/0.log" Dec 01 20:45:07 crc kubenswrapper[4888]: I1201 20:45:07.995070 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/extract-content/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.193003 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gcn8p_42ea68bc-ce83-490b-89ab-30ac5124fb9e/registry-server/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.219341 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.379338 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.384131 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.405980 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.573672 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-utilities/0.log" Dec 01 20:45:08 crc kubenswrapper[4888]: I1201 20:45:08.604738 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/extract-content/0.log" Dec 01 20:45:09 crc kubenswrapper[4888]: I1201 20:45:09.156951 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jp5pm_a1f98748-d4c4-4e86-93a3-5ebe405250ee/registry-server/0.log" Dec 01 20:45:09 crc kubenswrapper[4888]: I1201 20:45:09.450998 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:45:09 crc kubenswrapper[4888]: E1201 20:45:09.451530 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:45:21 crc kubenswrapper[4888]: I1201 20:45:21.452015 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 
Dec 01 20:45:32 crc kubenswrapper[4888]: I1201 20:45:32.451479 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"
Dec 01 20:45:32 crc kubenswrapper[4888]: E1201 20:45:32.452320 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:45:43 crc kubenswrapper[4888]: I1201 20:45:43.451854 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"
Dec 01 20:45:43 crc kubenswrapper[4888]: E1201 20:45:43.452764 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:45:56 crc kubenswrapper[4888]: I1201 20:45:56.450962 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"
Dec 01 20:45:56 crc kubenswrapper[4888]: E1201 20:45:56.451854 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:46:07 crc kubenswrapper[4888]: I1201 20:46:07.451557 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"
Dec 01 20:46:07 crc kubenswrapper[4888]: E1201 20:46:07.452352 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
Dec 01 20:46:21 crc kubenswrapper[4888]: I1201 20:46:21.452147 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b"
Dec 01 20:46:21 crc kubenswrapper[4888]: E1201 20:46:21.452841 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36"
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:46:34 crc kubenswrapper[4888]: I1201 20:46:34.451511 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:46:34 crc kubenswrapper[4888]: E1201 20:46:34.452177 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:46:46 crc kubenswrapper[4888]: I1201 20:46:46.451770 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:46:46 crc kubenswrapper[4888]: E1201 20:46:46.453698 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:46:53 crc kubenswrapper[4888]: I1201 20:46:53.555888 4888 generic.go:334] "Generic (PLEG): container finished" podID="1c9cf28e-18b4-4606-bfc1-51c5093777b5" containerID="2f55238413978a0d4c3bd844be2eac27a0b86dc30264ec073c3f0046cf972b58" exitCode=0 Dec 01 20:46:53 crc kubenswrapper[4888]: I1201 20:46:53.556020 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" event={"ID":"1c9cf28e-18b4-4606-bfc1-51c5093777b5","Type":"ContainerDied","Data":"2f55238413978a0d4c3bd844be2eac27a0b86dc30264ec073c3f0046cf972b58"} Dec 01 20:46:53 crc kubenswrapper[4888]: I1201 20:46:53.557004 4888 scope.go:117] "RemoveContainer" containerID="2f55238413978a0d4c3bd844be2eac27a0b86dc30264ec073c3f0046cf972b58" Dec 01 20:46:54 crc kubenswrapper[4888]: I1201 20:46:54.121781 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wc2rz_must-gather-c9nvr_1c9cf28e-18b4-4606-bfc1-51c5093777b5/gather/0.log" Dec 01 20:47:00 crc kubenswrapper[4888]: I1201 20:47:00.457397 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:47:00 crc kubenswrapper[4888]: E1201 20:47:00.458427 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.252176 4888 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wc2rz/must-gather-c9nvr"] Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.253001 4888 
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.262383 4888 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wc2rz/must-gather-c9nvr"]
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.678989 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wc2rz_must-gather-c9nvr_1c9cf28e-18b4-4606-bfc1-51c5093777b5/copy/0.log"
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.679262 4888 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wc2rz_must-gather-c9nvr_1c9cf28e-18b4-4606-bfc1-51c5093777b5/copy/0.log"
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.679652 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/must-gather-c9nvr"
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.679696 4888 generic.go:334] "Generic (PLEG): container finished" podID="1c9cf28e-18b4-4606-bfc1-51c5093777b5" containerID="9d9858a43bcae0a0b1293d112698729efc90ffb291c8db61e48828e8b3e44203" exitCode=143
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.679734 4888 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="511a4913b2c8c67471d226605f3bb40cd871caf920ddbe34b6913939d3180061"
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.842366 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzc44\" (UniqueName: \"kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44\") pod \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") "
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.842445 4888 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output\") pod \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\" (UID: \"1c9cf28e-18b4-4606-bfc1-51c5093777b5\") "
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.853016 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44" (OuterVolumeSpecName: "kube-api-access-pzc44") pod "1c9cf28e-18b4-4606-bfc1-51c5093777b5" (UID: "1c9cf28e-18b4-4606-bfc1-51c5093777b5"). InnerVolumeSpecName "kube-api-access-pzc44". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.945040 4888 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzc44\" (UniqueName: \"kubernetes.io/projected/1c9cf28e-18b4-4606-bfc1-51c5093777b5-kube-api-access-pzc44\") on node \"crc\" DevicePath \"\""
Dec 01 20:47:04 crc kubenswrapper[4888]: I1201 20:47:04.998887 4888 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1c9cf28e-18b4-4606-bfc1-51c5093777b5" (UID: "1c9cf28e-18b4-4606-bfc1-51c5093777b5"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 20:47:05 crc kubenswrapper[4888]: I1201 20:47:05.047326 4888 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1c9cf28e-18b4-4606-bfc1-51c5093777b5-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 20:47:05 crc kubenswrapper[4888]: I1201 20:47:05.686454 4888 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wc2rz/must-gather-c9nvr" Dec 01 20:47:06 crc kubenswrapper[4888]: I1201 20:47:06.461870 4888 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c9cf28e-18b4-4606-bfc1-51c5093777b5" path="/var/lib/kubelet/pods/1c9cf28e-18b4-4606-bfc1-51c5093777b5/volumes" Dec 01 20:47:14 crc kubenswrapper[4888]: I1201 20:47:14.451705 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:47:14 crc kubenswrapper[4888]: E1201 20:47:14.452592 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:47:29 crc kubenswrapper[4888]: I1201 20:47:29.451921 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:47:29 crc kubenswrapper[4888]: E1201 20:47:29.452806 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:47:44 crc kubenswrapper[4888]: I1201 20:47:44.452461 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:47:44 crc kubenswrapper[4888]: E1201 20:47:44.453506 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:47:58 crc kubenswrapper[4888]: I1201 20:47:58.451870 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:47:58 crc kubenswrapper[4888]: E1201 20:47:58.452577 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:48:07 crc kubenswrapper[4888]: I1201 20:48:07.360921 4888 scope.go:117] 
"RemoveContainer" containerID="9d9858a43bcae0a0b1293d112698729efc90ffb291c8db61e48828e8b3e44203" Dec 01 20:48:07 crc kubenswrapper[4888]: I1201 20:48:07.395574 4888 scope.go:117] "RemoveContainer" containerID="2f55238413978a0d4c3bd844be2eac27a0b86dc30264ec073c3f0046cf972b58" Dec 01 20:48:10 crc kubenswrapper[4888]: I1201 20:48:10.461906 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:48:10 crc kubenswrapper[4888]: E1201 20:48:10.464088 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:48:25 crc kubenswrapper[4888]: I1201 20:48:25.451543 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:48:25 crc kubenswrapper[4888]: E1201 20:48:25.452282 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:48:39 crc kubenswrapper[4888]: I1201 20:48:39.451959 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:48:39 crc kubenswrapper[4888]: E1201 20:48:39.452681 4888 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jcmzp_openshift-machine-config-operator(6a551e8a-d979-4cdb-87f5-1075b0b49a36)\"" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" Dec 01 20:48:50 crc kubenswrapper[4888]: I1201 20:48:50.458505 4888 scope.go:117] "RemoveContainer" containerID="ad5cd90771903175a05deb1a22d36630ddab0cdbb5de33afbdb778f52089fd1b" Dec 01 20:48:51 crc kubenswrapper[4888]: I1201 20:48:51.655759 4888 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" event={"ID":"6a551e8a-d979-4cdb-87f5-1075b0b49a36","Type":"ContainerStarted","Data":"cabd285ebf66259cb9acde6b3153ac00eecc4e80581584e438c8d733f4012243"} Dec 01 20:50:50 crc kubenswrapper[4888]: I1201 20:50:50.037423 4888 patch_prober.go:28] interesting pod/machine-config-daemon-jcmzp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 20:50:50 crc kubenswrapper[4888]: I1201 20:50:50.039565 4888 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jcmzp" podUID="6a551e8a-d979-4cdb-87f5-1075b0b49a36" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113377660024456 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113377661017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113366246016514 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113366246015464 5ustar corecore